virtuelle Umgebungen teil20 und teil20a
This commit is contained in:
		
							
								
								
									
										268
									
								
								teil20/lib/python3.11/site-packages/setuptools/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										268
									
								
								teil20/lib/python3.11/site-packages/setuptools/__init__.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,268 @@
 | 
			
		||||
"""Extensions to the 'distutils' for large or complex distributions"""
 | 
			
		||||
 | 
			
		||||
import functools
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
import _distutils_hack.override  # noqa: F401
 | 
			
		||||
 | 
			
		||||
import distutils.core
 | 
			
		||||
from distutils.errors import DistutilsOptionError
 | 
			
		||||
from distutils.util import convert_path as _convert_path
 | 
			
		||||
 | 
			
		||||
from ._deprecation_warning import SetuptoolsDeprecationWarning
 | 
			
		||||
 | 
			
		||||
import setuptools.version
 | 
			
		||||
from setuptools.extension import Extension
 | 
			
		||||
from setuptools.dist import Distribution
 | 
			
		||||
from setuptools.depends import Require
 | 
			
		||||
from setuptools.discovery import PackageFinder, PEP420PackageFinder
 | 
			
		||||
from . import monkey
 | 
			
		||||
from . import logging
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
__all__ = [
 | 
			
		||||
    'setup',
 | 
			
		||||
    'Distribution',
 | 
			
		||||
    'Command',
 | 
			
		||||
    'Extension',
 | 
			
		||||
    'Require',
 | 
			
		||||
    'SetuptoolsDeprecationWarning',
 | 
			
		||||
    'find_packages',
 | 
			
		||||
    'find_namespace_packages',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
__version__ = setuptools.version.__version__
 | 
			
		||||
 | 
			
		||||
bootstrap_install_from = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
find_packages = PackageFinder.find
 | 
			
		||||
find_namespace_packages = PEP420PackageFinder.find
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _install_setup_requires(attrs):
    """Ensure ``setup_requires`` packages are available before interpreting *attrs*."""

    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patch `distutils.core.Distribution`.
    class MinimalDistribution(distutils.core.Distribution):
        """
        A minimal version of a distribution for supporting the
        fetch_build_eggs interface.
        """

        def __init__(self, attrs):
            relevant = {'dependency_links', 'setup_requires'}
            super().__init__({k: v for k, v in attrs.items() if k in relevant})
            # Prevent accidentally triggering discovery with incomplete set of attrs
            self.set_defaults._disable()

        def _get_project_config_files(self, filenames=None):
            """Ignore ``pyproject.toml``, they are not related to setup_requires"""
            try:
                cfg, toml = super()._split_standard_project_metadata(filenames)
            except Exception:
                return filenames, ()
            return cfg, ()

        def finalize_options(self):
            """
            Disable finalize_options to avoid building the working set.
            Ref #2158.
            """

    dist = MinimalDistribution(attrs)

    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        _fetch_build_eggs(dist)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _fetch_build_eggs(dist):
 | 
			
		||||
    try:
 | 
			
		||||
        dist.fetch_build_eggs(dist.setup_requires)
 | 
			
		||||
    except Exception as ex:
 | 
			
		||||
        msg = """
 | 
			
		||||
        It is possible a package already installed in your system
 | 
			
		||||
        contains an version that is invalid according to PEP 440.
 | 
			
		||||
        You can try `pip install --use-pep517` as a workaround for this problem,
 | 
			
		||||
        or rely on a new virtual environment.
 | 
			
		||||
 | 
			
		||||
        If the problem refers to a package that is not installed yet,
 | 
			
		||||
        please contact that package's maintainers or distributors.
 | 
			
		||||
        """
 | 
			
		||||
        if "InvalidVersion" in ex.__class__.__name__:
 | 
			
		||||
            if hasattr(ex, "add_note"):
 | 
			
		||||
                ex.add_note(msg)  # PEP 678
 | 
			
		||||
            else:
 | 
			
		||||
                dist.announce(f"\n{msg}\n")
 | 
			
		||||
        raise
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def setup(**attrs):
    """Entry point mirroring ``distutils.core.setup`` with setuptools extras."""
    # Make sure we have any requirements needed to interpret 'attrs'.
    logging.configure()
    _install_setup_requires(attrs)
    return distutils.core.setup(**attrs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Reuse distutils' documentation for the drop-in replacement.
setup.__doc__ = distutils.core.setup.__doc__


# Base class as it existed before any monkey-patching was applied.
_Command = monkey.get_unpatched(distutils.core.Command)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Command(_Command):
    """
    Setuptools internal actions are organized using a *command design
    pattern*: each action (or group of closely related actions) executed
    during the build is implemented as a ``Command`` subclass.

    These commands are abstractions and do not necessarily correspond to
    a program that can (or should) be executed from a terminal, in a CLI
    fashion (although historically they would).

    Custom command classes **SHOULD** inherit from ``setuptools.Command``
    and implement a few mandatory methods:

    .. method:: initialize_options(self)

        Set or (reset) all options/attributes/caches used by the command
        to their default values.  Note that these values may be
        overwritten during the build.

    .. method:: finalize_options(self)

        Set final values for all options/attributes used by the command.
        Most of the time, each option/attribute/cache should only be set
        if it does not have any value yet
        (e.g. ``if self.attr is None: self.attr = val``).

    .. method:: run(self)

        Execute the actions intended by the command.
        (Side effects **SHOULD** only take place when ``run`` is
        executed, for example, creating new files or writing to the
        terminal output.)

    A useful analogy is to think of a command as a subroutine with local
    variables called "options": declared in ``initialize_options()``,
    given their final values in ``finalize_options()``, with the "body"
    being ``run()``.  Between the first two calls, ``setuptools`` (or
    other commands) may set option values based on the user's input or
    circumstance, so derived attributes **SHOULD** be recomputed in
    ``finalize_options`` rather than assumed stable.

    When overwriting existing commands, custom defined classes **MUST**
    abide by the same APIs implemented by the original class, and
    **SHOULD** inherit from it.
    """

    # When True, leftover command-line arguments are handed to this command.
    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        super().__init__(dist)
        vars(self).update(kw)

    def _ensure_stringlike(self, option, what, default=None):
        # Coerce a missing option to *default*; reject non-string values.
        value = getattr(self, option)
        if value is None:
            setattr(self, option, default)
            return default
        if not isinstance(value, str):
            raise DistutilsOptionError(
                "'%s' must be a %s (got `%s`)" % (option, what, value)
            )
        return value

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
        ["foo", "bar", "baz"].

        ..
           TODO: This method seems to be similar to the one in ``distutils.cmd``
           Probably it is just here for backward compatibility with old Python versions?

        :meta private:
        """
        value = getattr(self, option)
        if value is None:
            return
        if isinstance(value, str):
            setattr(self, option, re.split(r',\s*|\s+', value))
            return
        all_strings = isinstance(value, list) and all(
            isinstance(item, str) for item in value
        )
        if not all_strings:
            raise DistutilsOptionError(
                "'%s' must be a list of strings (got %r)" % (option, value)
            )

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        # Delegate to the unpatched base, then overlay keyword options.
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_all_simple(path):
 | 
			
		||||
    """
 | 
			
		||||
    Find all files under 'path'
 | 
			
		||||
    """
 | 
			
		||||
    results = (
 | 
			
		||||
        os.path.join(base, file)
 | 
			
		||||
        for base, dirs, files in os.walk(path, followlinks=True)
 | 
			
		||||
        for file in files
 | 
			
		||||
    )
 | 
			
		||||
    return filter(os.path.isfile, results)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        # Present paths relative to the current directory instead of absolute.
        to_relative = functools.partial(os.path.relpath, start=dir)
        found = map(to_relative, found)
    return list(found)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Deprecated shim: forwards to distutils' convert_path while warning callers.
@functools.wraps(_convert_path)
def convert_path(pathname):
    from inspect import cleandoc

    msg = """
    The function `convert_path` is considered internal and not part of the public API.
    Its direct usage by 3rd-party packages is considered deprecated and the function
    may be removed in the future.
    """
    warnings.warn(cleandoc(msg), SetuptoolsDeprecationWarning)
    return _convert_path(pathname)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class sic(str):
    """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""

    # Marker subclass of str: version normalization leaves such values untouched.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Apply monkey patches so distutils picks up setuptools' replacements.
monkey.patch_all()
 | 
			
		||||
@@ -0,0 +1,7 @@
 | 
			
		||||
class SetuptoolsDeprecationWarning(Warning):
    """
    Base class for warning deprecations in ``setuptools``.

    This class is deliberately not derived from ``DeprecationWarning``,
    and as such is visible by default.
    """
 | 
			
		||||
@@ -0,0 +1,14 @@
 | 
			
		||||
import sys
 | 
			
		||||
import importlib
 | 
			
		||||
 | 
			
		||||
# Mirror the interpreter's version string (text before the first space).
__version__, _, _ = sys.version.partition(' ')


try:
    # Allow Debian and pkgsrc (only) to customize system
    # behavior. Ref pypa/distutils#2 and pypa/distutils#16.
    # This hook is deprecated and no other environments
    # should use it.
    importlib.import_module('_distutils_system_mod')
except ImportError:
    pass
 | 
			
		||||
@@ -0,0 +1,194 @@
 | 
			
		||||
import collections
 | 
			
		||||
import functools
 | 
			
		||||
import itertools
 | 
			
		||||
import operator
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.collections 3.5.1
class DictStack(list, collections.abc.Mapping):
    """
    A stack of dictionaries that behaves as a view on those dictionaries,
    giving preference to the last.

    >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
    >>> stack['a']
    2
    >>> stack['c']
    2
    >>> stack.push(dict(a=3))
    >>> stack['a']
    3
    >>> set(stack.keys()) == set(['a', 'b', 'c'])
    True
    >>> d = stack.pop()
    >>> stack['a']
    2
    >>> 'c' in stack
    True
    """

    def __iter__(self):
        # Union of the keys of all stacked dicts, in arbitrary order.
        layers = list.__iter__(self)
        return iter(set(itertools.chain.from_iterable(d.keys() for d in layers)))

    def __getitem__(self, key):
        # Search from the most recently pushed dict down to the first.
        for layer in reversed(tuple(list.__iter__(self))):
            if key in layer:
                return layer[key]
        raise KeyError(key)

    # Pushing a dict is just appending it to the underlying list.
    push = list.append

    def __contains__(self, other):
        # Defer to Mapping semantics (key lookup), not list semantics.
        return collections.abc.Mapping.__contains__(self, other)

    def __len__(self):
        # Number of distinct keys across all layers.
        return len(list(iter(self)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.collections 3.7
class RangeMap(dict):
    """
    A dictionary-like object that uses the keys as bounds for a range.
    Inclusion of the value for that range is determined by the
    key_match_comparator, which defaults to less-than-or-equal.
    A value is returned for a key if it is the first key that matches in
    the sorted list of keys.

    One may supply keyword parameters to be passed to the sort function used
    to sort keys (i.e. key, reverse) as sort_params.

    Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'

    >>> r = RangeMap({3: 'a', 6: 'b'})  # boy, that was easy
    >>> r[1], r[2], r[3], r[4], r[5], r[6]
    ('a', 'a', 'a', 'b', 'b', 'b')

    Even float values should work so long as the comparison operator
    supports it.

    >>> r[4.5]
    'b'

    One can close the open-end of the RangeMap by using undefined_value

    >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
    >>> r[0]
    Traceback (most recent call last):
    ...
    KeyError: 0

    One can get the first or last elements in the range by using RangeMap.Item

    >>> last_item = RangeMap.Item(-1)
    >>> r[last_item]
    'b'

    One often wishes to define the ranges by their left-most values,
    which requires use of sort params and a key_match_comparator; an
    alternate constructor is provided:

    >>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
    >>> r[1], r[2], r[3], r[4], r[5], r[6]
    ('a', 'a', 'a', 'b', 'b', 'b')
    """

    def __init__(self, source, sort_params=None, key_match_comparator=operator.le):
        """
        :param source: mapping of boundary key -> value.
        :param sort_params: keyword arguments for ``sorted`` (key, reverse).
            Fix: previously a mutable default ``{}``; ``None`` now stands in
            for "no parameters" (backward compatible — explicit ``{}`` still
            works, and ``self.sort_params`` remains a dict either way).
        :param key_match_comparator: binary predicate matching a lookup item
            against a boundary key (default: ``operator.le``).
        """
        dict.__init__(self, source)
        self.sort_params = {} if sort_params is None else sort_params
        self.match = key_match_comparator

    @classmethod
    def left(cls, source):
        """Alternate constructor for ranges defined by their left-most values."""
        return cls(
            source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
        )

    def __getitem__(self, item):
        sorted_keys = sorted(self.keys(), **self.sort_params)
        if isinstance(item, RangeMap.Item):
            # Positional access: index into the sorted key order.
            result = self.__getitem__(sorted_keys[item])
        else:
            key = self._find_first_match_(sorted_keys, item)
            result = dict.__getitem__(self, key)
            if result is RangeMap.undefined_value:
                raise KeyError(key)
        return result

    def get(self, key, default=None):
        """
        Return the value for key if key is in the dictionary, else default.
        If default is not given, it defaults to None, so that this method
        never raises a KeyError.
        """
        try:
            return self[key]
        except KeyError:
            return default

    def _find_first_match_(self, keys, item):
        # First sorted key the comparator accepts for *item*.
        is_match = functools.partial(self.match, item)
        matches = list(filter(is_match, keys))
        if matches:
            return matches[0]
        raise KeyError(item)

    def bounds(self):
        """Return the (first, last) boundary keys in sorted order."""
        sorted_keys = sorted(self.keys(), **self.sort_params)
        return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])

    # Sentinel marking a range as undefined (lookups raise KeyError).
    # Cleanup: dropped the Py2-era ``str(...)`` wrapper around the type name.
    undefined_value = type('RangeValueUndefined', (), {})()

    class Item(int):
        "RangeMap Item"

    first_item = Item(0)
    last_item = Item(-1)
 | 
			
		||||
@@ -0,0 +1,20 @@
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.functools 3.5
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(first, *rest, **kwargs):
        # None short-circuits: the wrapped callable is never invoked.
        if first is None:
            return None
        return func(first, *rest, **kwargs)

    return wrapper
 | 
			
		||||
@@ -0,0 +1,4 @@
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Root logger shared by the distutils modules.
log = logging.getLogger()
 | 
			
		||||
@@ -0,0 +1,12 @@
 | 
			
		||||
import sys
 | 
			
		||||
import importlib
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def bypass_compiler_fixup(cmd, args):
    """No-op stand-in for ``_osx_support.compiler_fixup``; returns cmd unchanged."""
    return cmd


# On macOS, defer to the real fixup helper; elsewhere commands pass through.
if sys.platform == 'darwin':
    compiler_fixup = importlib.import_module('_osx_support').compiler_fixup
else:
    compiler_fixup = bypass_compiler_fixup
 | 
			
		||||
@@ -0,0 +1,572 @@
 | 
			
		||||
"""distutils._msvccompiler
 | 
			
		||||
 | 
			
		||||
Contains MSVCCompiler, an implementation of the abstract CCompiler class
 | 
			
		||||
for Microsoft Visual Studio 2015.
 | 
			
		||||
 | 
			
		||||
The module is compatible with VS 2015 and later. You can find legacy support
 | 
			
		||||
for older versions in distutils.msvc9compiler and distutils.msvccompiler.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# Written by Perry Stoll
 | 
			
		||||
# hacked by Robin Becker and Thomas Heller to do a better job of
 | 
			
		||||
#   finding DevStudio (through the registry)
 | 
			
		||||
# ported to VS 2005 and VS 2008 by Christian Heimes
 | 
			
		||||
# ported to VS 2015 by Steve Dower
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
import contextlib
 | 
			
		||||
import warnings
 | 
			
		||||
import unittest.mock as mock
 | 
			
		||||
 | 
			
		||||
with contextlib.suppress(ImportError):
 | 
			
		||||
    import winreg
 | 
			
		||||
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
    LibError,
 | 
			
		||||
    LinkError,
 | 
			
		||||
)
 | 
			
		||||
from .ccompiler import CCompiler, gen_lib_options
 | 
			
		||||
from ._log import log
 | 
			
		||||
from .util import get_platform
 | 
			
		||||
 | 
			
		||||
from itertools import count
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_vc2015():
    """Locate VS 2015+ via the registry; return (best_version, best_dir).

    Returns (None, None) when Visual C++ is not registered; best_version
    may be 0 with best_dir None when no suitable entry is found.
    """
    try:
        key = winreg.OpenKeyEx(
            winreg.HKEY_LOCAL_MACHINE,
            r"Software\Microsoft\VisualStudio\SxS\VC7",
            access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
        )
    except OSError:
        log.debug("Visual C++ is not registered")
        return None, None

    best_version, best_dir = 0, None
    with key:
        for index in count():
            try:
                name, vc_dir, value_type = winreg.EnumValue(key, index)
            except OSError:
                # No more values in this key.
                break
            if not (name and value_type == winreg.REG_SZ and os.path.isdir(vc_dir)):
                continue
            try:
                version = int(float(name))
            except (ValueError, TypeError):
                continue
            # Only VS 2015 (v14) or later qualifies; keep the newest.
            if version >= 14 and version > best_version:
                best_version, best_dir = version, vc_dir
    return best_version, best_dir
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_vc2017():
 | 
			
		||||
    """Returns "15, path" based on the result of invoking vswhere.exe
 | 
			
		||||
    If no install is found, returns "None, None"
 | 
			
		||||
 | 
			
		||||
    The version is returned to avoid unnecessarily changing the function
 | 
			
		||||
    result. It may be ignored when the path is not None.
 | 
			
		||||
 | 
			
		||||
    If vswhere.exe is not available, by definition, VS 2017 is not
 | 
			
		||||
    installed.
 | 
			
		||||
    """
 | 
			
		||||
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
 | 
			
		||||
    if not root:
 | 
			
		||||
        return None, None
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        path = subprocess.check_output(
 | 
			
		||||
            [
 | 
			
		||||
                os.path.join(
 | 
			
		||||
                    root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
 | 
			
		||||
                ),
 | 
			
		||||
                "-latest",
 | 
			
		||||
                "-prerelease",
 | 
			
		||||
                "-requires",
 | 
			
		||||
                "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
 | 
			
		||||
                "-property",
 | 
			
		||||
                "installationPath",
 | 
			
		||||
                "-products",
 | 
			
		||||
                "*",
 | 
			
		||||
            ],
 | 
			
		||||
            encoding="mbcs",
 | 
			
		||||
            errors="strict",
 | 
			
		||||
        ).strip()
 | 
			
		||||
    except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
 | 
			
		||||
        return None, None
 | 
			
		||||
 | 
			
		||||
    path = os.path.join(path, "VC", "Auxiliary", "Build")
 | 
			
		||||
    if os.path.isdir(path):
 | 
			
		||||
        return 15, path
 | 
			
		||||
 | 
			
		||||
    return None, None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Map 'vcvarsall.bat' platform specs to the matching VC runtime tag.
PLAT_SPEC_TO_RUNTIME = {
    'x86': 'x86',
    'x86_amd64': 'x64',
    'x86_arm': 'arm',
    'x86_arm64': 'arm64',
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_vcvarsall(plat_spec):
    """Locate vcvarsall.bat; return (path, None) or (None, None).

    The second element is always None (bpo-38597 removed the vcruntime
    return value) and is kept only for interface stability.
    """
    # Prefer VS 2017+ (vswhere), falling back to the VS 2015 registry scan.
    _, best_dir = _find_vc2017()
    if not best_dir:
        _, best_dir = _find_vc2015()

    if not best_dir:
        log.debug("No suitable Visual C++ version found")
        return None, None

    vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
    if not os.path.isfile(vcvarsall):
        log.debug("%s cannot be found", vcvarsall)
        return None, None

    return vcvarsall, None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_vc_env(plat_spec):
 | 
			
		||||
    if os.getenv("DISTUTILS_USE_SDK"):
 | 
			
		||||
        return {key.lower(): value for key, value in os.environ.items()}
 | 
			
		||||
 | 
			
		||||
    vcvarsall, _ = _find_vcvarsall(plat_spec)
 | 
			
		||||
    if not vcvarsall:
 | 
			
		||||
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        out = subprocess.check_output(
 | 
			
		||||
            f'cmd /u /c "{vcvarsall}" {plat_spec} && set',
 | 
			
		||||
            stderr=subprocess.STDOUT,
 | 
			
		||||
        ).decode('utf-16le', errors='replace')
 | 
			
		||||
    except subprocess.CalledProcessError as exc:
 | 
			
		||||
        log.error(exc.output)
 | 
			
		||||
        raise DistutilsPlatformError(f"Error executing {exc.cmd}")
 | 
			
		||||
 | 
			
		||||
    env = {
 | 
			
		||||
        key.lower(): value
 | 
			
		||||
        for key, _, value in (line.partition('=') for line in out.splitlines())
 | 
			
		||||
        if key and value
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    return env
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_exe(exe, paths=None):
 | 
			
		||||
    """Return path to an MSVC executable program.
 | 
			
		||||
 | 
			
		||||
    Tries to find the program in several places: first, one of the
 | 
			
		||||
    MSVC program search paths from the registry; next, the directories
 | 
			
		||||
    in the PATH environment variable.  If any of those work, return an
 | 
			
		||||
    absolute path that is known to exist.  If none of them work, just
 | 
			
		||||
    return the original program name, 'exe'.
 | 
			
		||||
    """
 | 
			
		||||
    if not paths:
 | 
			
		||||
        paths = os.getenv('path').split(os.pathsep)
 | 
			
		||||
    for p in paths:
 | 
			
		||||
        fn = os.path.join(os.path.abspath(p), exe)
 | 
			
		||||
        if os.path.isfile(fn):
 | 
			
		||||
            return fn
 | 
			
		||||
    return exe
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
# lighter-weight MSVC installs that do not include native 64-bit tools.
PLAT_TO_VCVARS = {
    'win32': 'x86',
    'win-amd64': 'x86_amd64',
    'win-arm32': 'x86_arm',
    'win-arm64': 'x86_arm64',
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        """Create the compiler.  Actual tool discovery is deferred to
        initialize(), which must run before any compile/link call."""
        super().__init__(verbose, dry_run, force)
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.initialized = False

    @classmethod
    def _configure(cls, vc_env):
        """
        Set class-level include/lib dirs.
        """
        # Stored on the class (not the instance) so every compiler instance
        # created afterwards sees the same MSVC environment.
        cls.include_dirs = cls._parse_path(vc_env.get('include', ''))
        cls.library_dirs = cls._parse_path(vc_env.get('lib', ''))

    @staticmethod
    def _parse_path(val):
        # Split a PATH-style string into a list of directories, dropping
        # empty entries and trailing separators.
        return [dir.rstrip(os.sep) for dir in val.split(os.pathsep) if dir]

    def initialize(self, plat_name=None):
        """Locate the MSVC toolchain for 'plat_name' and set up all
        compiler/linker option sets.

        Raises DistutilsPlatformError for an unknown platform name or when
        no compatible Visual Studio installation can be found.
        """
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        if plat_name not in PLAT_TO_VCVARS:
            raise DistutilsPlatformError(
                f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}"
            )

        # Get the vcvarsall.bat spec for the requested platform.
        plat_spec = PLAT_TO_VCVARS[plat_name]

        vc_env = _get_vc_env(plat_spec)
        if not vc_env:
            raise DistutilsPlatformError(
                "Unable to find a compatible " "Visual Studio installation."
            )
        self._configure(vc_env)

        # Remember the MSVC tool PATH so spawn() can run the tools later.
        self._paths = vc_env.get('path', '')
        paths = self._paths.split(os.pathsep)
        self.cc = _find_exe("cl.exe", paths)
        self.linker = _find_exe("link.exe", paths)
        self.lib = _find_exe("lib.exe", paths)
        self.rc = _find_exe("rc.exe", paths)  # resource compiler
        self.mc = _find_exe("mc.exe", paths)  # message compiler
        self.mt = _find_exe("mt.exe", paths)  # manifest tool

        self.preprocess_options = None
        # bpo-38597: Always compile with dynamic linking
        # Future releases of Python 3.x will include all past
        # versions of vcruntime*.dll for compatibility.
        self.compile_options = ['/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD']

        self.compile_options_debug = [
            '/nologo',
            '/Od',
            '/MDd',
            '/Zi',
            '/W3',
            '/D_DEBUG',
        ]

        # Common linker flags; debug adds full PDB generation.
        ldflags = ['/nologo', '/INCREMENTAL:NO', '/LTCG']

        ldflags_debug = ['/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL']

        self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
        self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
        self.ldflags_shared = [
            *ldflags,
            '/DLL',
            '/MANIFEST:EMBED,ID=2',
            '/MANIFESTUAC:NO',
        ]
        self.ldflags_shared_debug = [
            *ldflags_debug,
            '/DLL',
            '/MANIFEST:EMBED,ID=2',
            '/MANIFESTUAC:NO',
        ]
        self.ldflags_static = [*ldflags]
        self.ldflags_static_debug = [*ldflags_debug]

        # Lookup table used by link(): (target kind, debug flag) -> flags.
        # Both None and False map to the release flags.
        self._ldflags = {
            (CCompiler.EXECUTABLE, None): self.ldflags_exe,
            (CCompiler.EXECUTABLE, False): self.ldflags_exe,
            (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
            (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
            (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
        }

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    @property
    def out_extensions(self):
        """Map source extensions to output extensions; .rc/.mc sources
        compile to .res rather than the default object extension."""
        return {
            **super().out_extensions,
            **{
                ext: self.res_extension
                for ext in self._rc_extensions + self._mc_extensions
            },
        }

    def compile(  # noqa: C901
        self,
        sources,
        output_dir=None,
        macros=None,
        include_dirs=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        depends=None,
    ):
        """Compile 'sources' to object (or .res) files and return the list
        of expected output paths.

        .c/.cc/.cpp/.cxx go through cl.exe; .rc through rc.exe; .mc through
        mc.exe then rc.exe.  Raises CompileError when a tool fails or a
        source has an unrecognized extension.
        """
        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs
        )
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        add_cpp_opts = False

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # No build entry for this object: nothing to do (up to date
                # or filtered out by _setup_compile).
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
                add_cpp_opts = True
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc, "/fo" + obj, rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError(f"Don't know how to compile {src} to {obj}")

            args = [self.cc] + compile_opts + pp_opts
            if add_cpp_opts:
                # /EHsc enables standard C++ exception handling.
                args.append('/EHsc')
            args.append(input_opt)
            args.append("/Fo" + obj)
            args.extend(extra_postargs)

            try:
                self.spawn(args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Bundle 'objects' into a static .lib via lib.exe, skipping the
        step when the output is already newer than its inputs.

        Raises LibError when lib.exe fails.
        """
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link 'objects' into an executable or DLL with link.exe.

        'target_desc' selects the flag set via self._ldflags.  Skips the
        link when the output is up to date.  Raises LinkError on failure.
        """
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        if runtime_library_dirs:
            # MSVC has no rpath equivalent; warn instead of failing.
            self.warn(
                "I don't know what to do with 'runtime_library_dirs': "
                + str(runtime_library_dirs)
            )

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ldflags = self._ldflags[target_desc, debug]

            export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]

            ld_args = (
                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
            )

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename)
                )
                implib_file = os.path.join(build_temp, self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            output_dir = os.path.dirname(os.path.abspath(output_filename))
            self.mkpath(output_dir)
            try:
                log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def spawn(self, cmd):
        """Run 'cmd' with PATH set to the MSVC tool directories found
        during initialize()."""
        env = dict(os.environ, PATH=self._paths)
        with self._fallback_spawn(cmd, env) as fallback:
            return super().spawn(cmd, env=env)
        # Reached only when _fallback_spawn suppressed the TypeError and
        # re-ran the command itself; its result is stashed on the bag.
        return fallback.value

    @contextlib.contextmanager
    def _fallback_spawn(self, cmd, env):
        """
        Discovered in pypa/distutils#15, some tools monkeypatch the compiler,
        so the 'env' kwarg causes a TypeError. Detect this condition and
        restore the legacy, unsafe behavior.
        """
        # 'bag' carries the fallback result back to the caller (spawn).
        bag = type('Bag', (), {})()
        try:
            yield bag
        except TypeError as exc:
            # Only swallow the specific monkeypatch symptom; anything else
            # is a genuine error and must propagate.
            if "unexpected keyword argument 'env'" not in str(exc):
                raise
        else:
            return
        warnings.warn("Fallback spawn triggered. Please update distutils monkeypatch.")
        # Apply the env by mutating os.environ temporarily, since the
        # monkeypatched spawn() cannot accept an 'env' argument.
        with mock.patch.dict('os.environ', env):
            bag.value = super().spawn(cmd)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        # Linker flag to add a library search directory.
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        # MSVC has no runtime search path (rpath) concept.
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC"
        )

    def library_option(self, lib):
        # On MSVC a library is referenced by its filename, not '-l<name>'.
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for 'lib' and return its full path, or None.

        With 'debug' true, a '<lib>_d' debug variant is preferred when
        present in the same directory.
        """
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.isfile(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None
 | 
			
		||||
@@ -0,0 +1,280 @@
 | 
			
		||||
"""distutils.archive_util
 | 
			
		||||
 | 
			
		||||
Utility functions for creating archive files (tarballs, zip files,
 | 
			
		||||
that sort of thing)."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from warnings import warn
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    import zipfile
 | 
			
		||||
except ImportError:
 | 
			
		||||
    zipfile = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
from .errors import DistutilsExecError
 | 
			
		||||
from .spawn import spawn
 | 
			
		||||
from .dir_util import mkpath
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    from pwd import getpwnam
 | 
			
		||||
except ImportError:
 | 
			
		||||
    getpwnam = None
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    from grp import getgrnam
 | 
			
		||||
except ImportError:
 | 
			
		||||
    getgrnam = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_gid(name):
 | 
			
		||||
    """Returns a gid, given a group name."""
 | 
			
		||||
    if getgrnam is None or name is None:
 | 
			
		||||
        return None
 | 
			
		||||
    try:
 | 
			
		||||
        result = getgrnam(name)
 | 
			
		||||
    except KeyError:
 | 
			
		||||
        result = None
 | 
			
		||||
    if result is not None:
 | 
			
		||||
        return result[2]
 | 
			
		||||
    return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_uid(name):
 | 
			
		||||
    """Returns an uid, given a user name."""
 | 
			
		||||
    if getpwnam is None or name is None:
 | 
			
		||||
        return None
 | 
			
		||||
    try:
 | 
			
		||||
        result = getpwnam(name)
 | 
			
		||||
    except KeyError:
 | 
			
		||||
        result = None
 | 
			
		||||
    if result is not None:
 | 
			
		||||
        return result[2]
 | 
			
		||||
    return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def make_tarball(
    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
):
    """Create a (possibly compressed) tar archive of everything under
    'base_dir'.

    'compress' selects the scheme: "gzip" (the default), "bzip2", "xz",
    "compress", or None for an uncompressed archive.  ("compress" is
    deprecated and delegated to the external utility of that name.)

    'owner' and 'group' override the ownership recorded for every archive
    member; when omitted, the current owner and group are kept.

    Returns the name of the file written: 'base_dir' + ".tar" plus the
    appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").
    """
    mode_suffixes = {
        'gzip': 'gz',
        'bzip2': 'bz2',
        'xz': 'xz',
        None: '',
        'compress': '',
    }
    extensions = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'}

    if compress is not None and compress not in extensions:
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
            "'xz' or 'compress'"
        )

    archive_name = base_name + '.tar'
    if compress != 'compress':
        # 'compress' gains its '.Z' extension later, via the external tool.
        archive_name += extensions.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _stamp_ownership(tarinfo):
        # tarfile add-filter: rewrite ownership on each member when an
        # override was supplied.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % mode_suffixes[compress])
        try:
            tar.add(base_dir, filter=_stamp_ownership)
        finally:
            tar.close()

    if compress == 'compress':
        warn("'compress' is deprecated.", DeprecationWarning)
        compressed_name = archive_name + extensions[compress]
        # Invocation differs by platform: Windows 'compress' takes an
        # explicit output name, Unix compresses in place with -f.
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
    """Create a zip file from all the files under 'base_dir'.

    Writes 'base_name' + ".zip" using the stdlib 'zipfile' module when it
    imported successfully, otherwise by spawning the external InfoZIP
    "zip" utility.  Raises DistutilsExecError when neither is usable.
    Returns the name of the zip file produced.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    if zipfile is None:
        # No zipfile module: fall back to an external 'zip' command.
        zipoptions = "-r" if verbose else "-rq"
        try:
            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(
                (
                    "unable to create zip file '%s': "
                    "could neither import the 'zipfile' module nor "
                    "find a standalone zip utility"
                )
                % zip_filename
            )
    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

        if not dry_run:
            try:
                archive = zipfile.ZipFile(
                    zip_filename, "w", compression=zipfile.ZIP_DEFLATED
                )
            except RuntimeError:
                # Deflate unavailable (zlib missing): store uncompressed.
                archive = zipfile.ZipFile(
                    zip_filename, "w", compression=zipfile.ZIP_STORED
                )

            def _record(path):
                # Add one entry and log it.
                archive.write(path, path)
                log.info("adding '%s'", path)

            with archive:
                if base_dir != os.curdir:
                    _record(os.path.normpath(os.path.join(base_dir, '')))
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        _record(os.path.normpath(os.path.join(dirpath, name, '')))
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            _record(path)

    return zip_filename
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Registry of supported archive formats, used by make_archive() and
# check_archive_formats().  Each value is a triple:
#   (builder function, [(keyword, fixed value), ...], human description).
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (make_zipfile, [], "ZIP file"),
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def check_archive_formats(formats):
    """Return the first entry of 'formats' that is not a known archive
    format, or None when every entry is recognized."""
    return next(
        (fmt for fmt in formats if fmt not in ARCHIVE_FORMATS),
        None,
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def make_archive(
    base_name,
    format,
    root_dir=None,
    base_dir=None,
    verbose=0,
    dry_run=0,
    owner=None,
    group=None,
):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Resolve base_name before chdir so it stays valid afterwards.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    try:
        builder, fixed_args, _description = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    call_kwargs = {'dry_run': dry_run}
    for keyword, value in fixed_args:
        call_kwargs[keyword] = value

    # Ownership overrides only make sense for tar-based formats.
    if format != 'zip':
        call_kwargs['owner'] = owner
        call_kwargs['group'] = group

    try:
        filename = builder(base_name, base_dir, **call_kwargs)
    finally:
        # Always restore the working directory, even if the builder raised.
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
 | 
			
		||||
@@ -0,0 +1,408 @@
 | 
			
		||||
"""distutils.bcppcompiler
 | 
			
		||||
 | 
			
		||||
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
 | 
			
		||||
for the Borland C++ compiler.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# This implementation by Lyle Johnson, based on the original msvccompiler.py
 | 
			
		||||
# module and using the directions originally published by Gordon Williams.
 | 
			
		||||
 | 
			
		||||
# XXX looks like there's a LOT of overlap between these two classes:
 | 
			
		||||
# someone should sit down and factor out the common code as
 | 
			
		||||
# WindowsCCompiler!  --GPW
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
    LibError,
 | 
			
		||||
    LinkError,
 | 
			
		||||
    UnknownFileError,
 | 
			
		||||
)
 | 
			
		||||
from .ccompiler import CCompiler, gen_preprocess_options
 | 
			
		||||
from .file_util import write_file
 | 
			
		||||
from .dep_util import newer
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Emit a deprecation notice at import time: the whole bcppcompiler module
# is scheduled for removal from distutils.
warnings.warn(
    "bcppcompiler is deprecated and slated to be removed "
    "in the future. Please discontinue use or file an issue "
    "with pypa/distutils describing your use case.",
    DeprecationWarning,
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BCPPCompiler(CCompiler):
    """Concrete class that implements an interface to the Borland C/C++
    compiler, as defined by the CCompiler abstract class.
    """

    compiler_type = 'bcpp'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        """Initialize tool names and default option lists for bcc32."""
        super().__init__(verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.

        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']

    # -- Worker methods ------------------------------------------------

    def compile(  # noqa: C901
        self,
        sources,
        output_dir=None,
        macros=None,
        include_dirs=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        depends=None,
    ):
        """Compile 'sources' with bcc32 (and brcc32 for .rc files).

        Returns the list of object file names; raises CompileError if a
        spawned tool fails.  .res files are passed through untouched.
        """
        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs
        )
        compile_opts = extra_preargs or []
        compile_opts.append('-c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object already up to date (not in the build map) -- skip it.
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue  # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn(["brcc32", "-fo", obj, src])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue  # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn(
                    [self.cc]
                    + compile_opts
                    + pp_opts
                    + [input_opt, output_opt]
                    + extra_postargs
                    + [src]
                )
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    # compile ()

    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Bundle 'objects' into a static library using tlib.

        Raises LibError if tlib fails; skips the work when the output is
        already newer than all of the objects.
        """
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            # '/u' tells tlib to replace (update) existing members.
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()

    def link(  # noqa: C901
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link objects into an executable or DLL with ilink32.

        Builds ilink32's comma-delimited argument groups (objects, output
        file, map file, libraries, .def file, resources) in the exact
        order the linker expects.  Raises LinkError on failure.
        """
        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args(
            libraries, library_dirs, runtime_library_dirs
        )

        if runtime_library_dirs:
            log.warning(
                "I don't know what to do with 'runtime_library_dirs': %s",
                str(runtime_library_dirs),
            )

        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                # c0w32 is Borland's startup object for GUI/console EXEs.
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                # c0d32 is the startup object for DLL targets.
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]

            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split(output_filename)
                modname, ext = os.path.splitext(tail)
                temp_dir = os.path.dirname(objects[0])  # preserve tree structure
                def_file = os.path.join(temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in export_symbols or []:
                    contents.append('  {}=_{}'.format(sym, sym))
                self.execute(write_file, (def_file, contents), "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = map(os.path.normpath, objects)
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                (base, ext) = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)

            for ell in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(ell))
            ld_args.append("/L.")  # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command-line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend([',', output_filename])
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append('import32')
            ld_args.append('cw32mt')

            # def file for export symbols
            ld_args.extend([',', def_file])
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for 'lib', preferring bcpp-specific variants.

        Returns the full path of the first existing candidate, or None
        when no variant is found in any directory.
        """
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Distutils hackers, I suspect
        # ;-).  The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = lib + "_d"
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)

        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            # (NOTE: this 'else' belongs to the outer 'for'; it always runs
            # here because the loop body never 'break's -- it only returns.)
            return None

    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
        """Map source file names to object file names.

        .res files pass through unchanged, .rc files map to .res, and
        everything else gets the .obj extension.  Raises UnknownFileError
        for unrecognized extensions.
        """
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError(
                    "unknown file type '{}' (from '{}')".format(ext, src_name)
                )
            if strip_dir:
                base = os.path.basename(base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append(os.path.join(output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append(os.path.join(output_dir, base + '.res'))
            else:
                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
        return obj_names

    # object_filenames ()

    def preprocess(
        self,
        source,
        output_file=None,
        macros=None,
        include_dirs=None,
        extra_preargs=None,
        extra_postargs=None,
    ):
        """Run cpp32.exe on 'source', writing to 'output_file' if given.

        Only preprocesses when forced, when there is no output file, or
        when the source is newer than the target.  Raises CompileError on
        failure.
        """
        (_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError as msg:
                print(msg)
                raise CompileError(msg)

    # preprocess()
 | 
			
		||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										435
									
								
								teil20/lib/python3.11/site-packages/setuptools/_distutils/cmd.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										435
									
								
								teil20/lib/python3.11/site-packages/setuptools/_distutils/cmd.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,435 @@
 | 
			
		||||
"""distutils.cmd
 | 
			
		||||
 | 
			
		||||
Provides the Command class, the base class for the command classes
 | 
			
		||||
in the distutils.command package.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
from .errors import DistutilsOptionError
 | 
			
		||||
from . import util, dir_util, file_util, archive_util, dep_util
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Command:
 | 
			
		||||
    """Abstract base class for defining command classes, the "worker bees"
 | 
			
		||||
    of the Distutils.  A useful analogy for command classes is to think of
 | 
			
		||||
    them as subroutines with local variables called "options".  The options
 | 
			
		||||
    are "declared" in 'initialize_options()' and "defined" (given their
 | 
			
		||||
    final values, aka "finalized") in 'finalize_options()', both of which
 | 
			
		||||
    must be defined by every command class.  The distinction between the
 | 
			
		||||
    two is necessary because option values might come from the outside
 | 
			
		||||
    world (command line, config file, ...), and any options dependent on
 | 
			
		||||
    other options must be computed *after* these outside influences have
 | 
			
		||||
    been processed -- hence 'finalize_options()'.  The "body" of the
 | 
			
		||||
    subroutine, where it does all its work based on the values of its
 | 
			
		||||
    options, is the 'run()' method, which must also be implemented by every
 | 
			
		||||
    command class.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    # 'sub_commands' formalizes the notion of a "family" of commands,
 | 
			
		||||
    # eg. "install" as the parent with sub-commands "install_lib",
 | 
			
		||||
    # "install_headers", etc.  The parent of a family of commands
 | 
			
		||||
    # defines 'sub_commands' as a class attribute; it's a list of
 | 
			
		||||
    #    (command_name : string, predicate : unbound_method | string | None)
 | 
			
		||||
    # tuples, where 'predicate' is a method of the parent command that
 | 
			
		||||
    # determines whether the corresponding command is applicable in the
 | 
			
		||||
    # current situation.  (Eg. we "install_headers" is only applicable if
 | 
			
		||||
    # we have any C header files to install.)  If 'predicate' is None,
 | 
			
		||||
    # that command is always applicable.
 | 
			
		||||
    #
 | 
			
		||||
    # 'sub_commands' is usually defined at the *end* of a class, because
 | 
			
		||||
    # predicates can be unbound methods, so they must already have been
 | 
			
		||||
    # defined.  The canonical example is the "install" command.
 | 
			
		||||
    sub_commands = []
 | 
			
		||||
 | 
			
		||||
    # -- Creation/initialization methods -------------------------------
 | 
			
		||||
 | 
			
		||||
    def __init__(self, dist):
        """Create and initialize a new Command object.  Most importantly,
        invokes the 'initialize_options()' method, which is the real
        initializer and depends on the actual command being
        instantiated.

        Raises TypeError if 'dist' is not a Distribution instance, and
        RuntimeError if instantiated directly (Command is abstract).
        """
        # late import because of mutual dependence between these classes
        from distutils.dist import Distribution

        if not isinstance(dist, Distribution):
            raise TypeError("dist must be a Distribution instance")
        if self.__class__ is Command:
            raise RuntimeError("Command is an abstract class")

        self.distribution = dist
        self.initialize_options()

        # Per-command versions of the global flags, so that the user can
        # customize Distutils' behaviour command-by-command and let some
        # commands fall back on the Distribution's behaviour.  None means
        # "not defined, check self.distribution's copy", while 0 or 1 mean
        # false and true (duh).  Note that this means figuring out the real
        # value of each flag is a touch complicated -- hence "self._dry_run"
        # will be handled by __getattr__, below.
        # XXX This needs to be fixed.
        self._dry_run = None

        # verbose is largely ignored, but needs to be set for
        # backwards compatibility (I think)?
        self.verbose = dist.verbose

        # Some commands define a 'self.force' option to ignore file
        # timestamps, but methods defined *here* assume that
        # 'self.force' exists for all commands.  So define it here
        # just to be safe.
        self.force = None

        # The 'help' flag is just used for command-line parsing, so
        # none of that complicated bureaucracy is needed.
        self.help = 0

        # 'finalized' records whether or not 'finalize_options()' has been
        # called.  'finalize_options()' itself should not pay attention to
        # this flag: it is the business of 'ensure_finalized()', which
        # always calls 'finalize_options()', to respect/update it.
        self.finalized = 0
 | 
			
		||||
 | 
			
		||||
    # XXX A more explicit way to customize dry_run would be better.
 | 
			
		||||
    def __getattr__(self, attr):
 | 
			
		||||
        if attr == 'dry_run':
 | 
			
		||||
            myval = getattr(self, "_" + attr)
 | 
			
		||||
            if myval is None:
 | 
			
		||||
                return getattr(self.distribution, attr)
 | 
			
		||||
            else:
 | 
			
		||||
                return myval
 | 
			
		||||
        else:
 | 
			
		||||
            raise AttributeError(attr)
 | 
			
		||||
 | 
			
		||||
    def ensure_finalized(self):
 | 
			
		||||
        if not self.finalized:
 | 
			
		||||
            self.finalize_options()
 | 
			
		||||
        self.finalized = 1
 | 
			
		||||
 | 
			
		||||
    # Subclasses must define:
 | 
			
		||||
    #   initialize_options()
 | 
			
		||||
    #     provide default values for all options; may be customized by
 | 
			
		||||
    #     setup script, by options from config file(s), or by command-line
 | 
			
		||||
    #     options
 | 
			
		||||
    #   finalize_options()
 | 
			
		||||
    #     decide on the final values for all options; this is called
 | 
			
		||||
    #     after all possible intervention from the outside world
 | 
			
		||||
    #     (command-line, option file, etc.) has been processed
 | 
			
		||||
    #   run()
 | 
			
		||||
    #     run the command: do whatever it is we're here to do,
 | 
			
		||||
    #     controlled by the command's various option values
 | 
			
		||||
 | 
			
		||||
    def initialize_options(self):
 | 
			
		||||
        """Set default values for all the options that this command
 | 
			
		||||
        supports.  Note that these defaults may be overridden by other
 | 
			
		||||
        commands, by the setup script, by config files, or by the
 | 
			
		||||
        command-line.  Thus, this is not the place to code dependencies
 | 
			
		||||
        between options; generally, 'initialize_options()' implementations
 | 
			
		||||
        are just a bunch of "self.foo = None" assignments.
 | 
			
		||||
 | 
			
		||||
        This method must be implemented by all command classes.
 | 
			
		||||
        """
 | 
			
		||||
        raise RuntimeError(
 | 
			
		||||
            "abstract method -- subclass %s must override" % self.__class__
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def finalize_options(self):
 | 
			
		||||
        """Set final values for all the options that this command supports.
 | 
			
		||||
        This is always called as late as possible, ie.  after any option
 | 
			
		||||
        assignments from the command-line or from other commands have been
 | 
			
		||||
        done.  Thus, this is the place to code option dependencies: if
 | 
			
		||||
        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
 | 
			
		||||
        long as 'foo' still has the same value it was assigned in
 | 
			
		||||
        'initialize_options()'.
 | 
			
		||||
 | 
			
		||||
        This method must be implemented by all command classes.
 | 
			
		||||
        """
 | 
			
		||||
        raise RuntimeError(
 | 
			
		||||
            "abstract method -- subclass %s must override" % self.__class__
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def dump_options(self, header=None, indent=""):
 | 
			
		||||
        from distutils.fancy_getopt import longopt_xlate
 | 
			
		||||
 | 
			
		||||
        if header is None:
 | 
			
		||||
            header = "command options for '%s':" % self.get_command_name()
 | 
			
		||||
        self.announce(indent + header, level=logging.INFO)
 | 
			
		||||
        indent = indent + "  "
 | 
			
		||||
        for (option, _, _) in self.user_options:
 | 
			
		||||
            option = option.translate(longopt_xlate)
 | 
			
		||||
            if option[-1] == "=":
 | 
			
		||||
                option = option[:-1]
 | 
			
		||||
            value = getattr(self, option)
 | 
			
		||||
            self.announce(indent + "{} = {}".format(option, value), level=logging.INFO)
 | 
			
		||||
 | 
			
		||||
    def run(self):
        """Execute the command's action.

        A command's raison d'etre: carry out what it exists to do,
        driven by the options set up in 'initialize_options()',
        customized by other commands, the setup script, the command
        line and config files, and finalized in 'finalize_options()'.
        All terminal output and filesystem interaction should be done
        by 'run()'.

        Subclasses must override this method.
        """
        raise RuntimeError(
            "abstract method -- subclass %s must override" % self.__class__
        )
    def announce(self, msg, level=logging.DEBUG):
        """Log 'msg' at the given logging level (default: DEBUG) via the
        module-level 'log' logger."""
        log.log(level, msg)
    def debug_print(self, msg):
        """Print 'msg' to stdout when distutils debugging is enabled.

        Debugging is controlled by the global DEBUG flag, taken from the
        DISTUTILS_DEBUG environment variable.
        """
        from distutils.debug import DEBUG

        if not DEBUG:
            return
        print(msg)
        sys.stdout.flush()
    # -- Option validation methods -------------------------------------
 | 
			
		||||
    # (these are very handy in writing the 'finalize_options()' method)
 | 
			
		||||
    #
 | 
			
		||||
    # NB. the general philosophy here is to ensure that a particular option
 | 
			
		||||
    # value meets certain type and value constraints.  If not, we try to
 | 
			
		||||
    # force it into conformance (eg. if we expect a list but have a string,
 | 
			
		||||
    # split the string on comma and/or whitespace).  If we can't force the
 | 
			
		||||
    # option into conformance, raise DistutilsOptionError.  Thus, command
 | 
			
		||||
    # classes need do nothing more than (eg.)
 | 
			
		||||
    #   self.ensure_string_list('foo')
 | 
			
		||||
    # and they can be guaranteed that thereafter, self.foo will be
 | 
			
		||||
    # a list of strings.
 | 
			
		||||
 | 
			
		||||
    def _ensure_stringlike(self, option, what, default=None):
 | 
			
		||||
        val = getattr(self, option)
 | 
			
		||||
        if val is None:
 | 
			
		||||
            setattr(self, option, default)
 | 
			
		||||
            return default
 | 
			
		||||
        elif not isinstance(val, str):
 | 
			
		||||
            raise DistutilsOptionError(
 | 
			
		||||
                "'{}' must be a {} (got `{}`)".format(option, what, val)
 | 
			
		||||
            )
 | 
			
		||||
        return val
 | 
			
		||||
 | 
			
		||||
    def ensure_string(self, option, default=None):
        """Coerce 'option' to a string, substituting 'default' when the
        option is not defined (i.e. is None)."""
        self._ensure_stringlike(option, "string", default)
    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
        ["foo", "bar", "baz"].
        """
        value = getattr(self, option)
        if value is None:
            return
        if isinstance(value, str):
            setattr(self, option, re.split(r',\s*|\s+', value))
            return
        # Anything else must already be a homogeneous list of strings.
        valid = isinstance(value, list) and all(
            isinstance(item, str) for item in value
        )
        if not valid:
            raise DistutilsOptionError(
                "'{}' must be a list of strings (got {!r})".format(option, value)
            )
    def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
 | 
			
		||||
        val = self._ensure_stringlike(option, what, default)
 | 
			
		||||
        if val is not None and not tester(val):
 | 
			
		||||
            raise DistutilsOptionError(
 | 
			
		||||
                ("error in '%s' option: " + error_fmt) % (option, val)
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file."""
        self._ensure_tested_string(
            option,
            os.path.isfile,
            "filename",
            "'%s' does not exist or is not a file",
        )
    def ensure_dirname(self, option):
        """Ensure that 'option' is the name of an existing directory."""
        self._ensure_tested_string(
            option,
            os.path.isdir,
            "directory name",
            "'%s' does not exist or is not a directory",
        )
    # -- Convenience methods for commands ------------------------------
 | 
			
		||||
 | 
			
		||||
    def get_command_name(self):
        """Return this command's name: the 'command_name' attribute when
        present, else the class name."""
        return getattr(self, 'command_name', self.__class__.__name__)
    def set_undefined_options(self, src_cmd, *option_pairs):
        """Fill in this command's "undefined" options from another command.

        "Undefined" means "is None", the convention for an option left
        unchanged between 'initialize_options()' and
        'finalize_options()'.  'src_cmd' names the command whose
        (finalized) option values are the source — its command object is
        created if necessary.  Each remaining argument is a
        '(src_option, dst_option)' pair meaning: if 'dst_option' on this
        command is still None, copy 'src_option' from the source
        command.  Usually called from 'finalize_options()' for options
        that depend on some other command rather than another option of
        the same command.
        """
        source = self.distribution.get_command_obj(src_cmd)
        source.ensure_finalized()
        for src_name, dst_name in option_pairs:
            if getattr(self, dst_name) is None:
                setattr(self, dst_name, getattr(source, src_name))
    def get_finalized_command(self, command, create=1):
        """Return the finalized command object for 'command'.

        Wrapper around Distribution's 'get_command_obj()': look up (and,
        if 'create' is true, create when necessary) the command object,
        call its 'ensure_finalized()' method, and hand it back.
        """
        target = self.distribution.get_command_obj(command, create)
        target.ensure_finalized()
        return target
    # XXX rename to 'get_reinitialized_command()'? (should do the
 | 
			
		||||
    # same in dist.py, if so)
 | 
			
		||||
    def reinitialize_command(self, command, reinit_subcommands=0):
        """Delegate to Distribution.reinitialize_command() and return its
        result (the reinitialized command object)."""
        return self.distribution.reinitialize_command(command, reinit_subcommands)
    def run_command(self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        # NOTE(review): Distribution is expected to make this a no-op for
        # commands that have already run -- confirm in dist.py.
        self.distribution.run_command(command)
    def get_sub_commands(self):
        """Return the names of the sub-commands relevant for the current
        distribution (i.e. those that need to be run).

        Based on the 'sub_commands' class attribute: each entry is a
        '(name, predicate)' pair, where a None predicate means "always
        run" and a callable predicate is invoked with this command to
        decide.
        """
        return [
            name
            for name, predicate in self.sub_commands
            if predicate is None or predicate(self)
        ]
    # -- External world manipulation -----------------------------------
 | 
			
		||||
 | 
			
		||||
    def warn(self, msg):
        """Log 'msg' as a warning, prefixed with this command's name."""
        log.warning("warning: %s: %s\n", self.get_command_name(), msg)
    def execute(self, func, args, msg=None, level=1):
        """Call 'func(*args)' via distutils.util.execute, honoring this
        command's 'dry_run' flag.  'level' is accepted for backward
        compatibility but not forwarded."""
        util.execute(func, args, msg, dry_run=self.dry_run)
    def mkpath(self, name, mode=0o777):
        """Create directory 'name' with 'mode', honoring the 'dry_run'
        flag; delegates to distutils.dir_util.mkpath."""
        dir_util.mkpath(name, mode, dry_run=self.dry_run)
    def copy_file(
        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
    ):
        """Copy a file respecting verbose, dry-run and force flags.  (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)"""
        # 'not self.force' is the 5th positional argument to
        # file_util.copy_file (NOTE(review): presumably its 'update'
        # parameter -- confirm); 'level' is accepted for compatibility
        # but not forwarded.
        return file_util.copy_file(
            infile,
            outfile,
            preserve_mode,
            preserve_times,
            not self.force,
            link,
            dry_run=self.dry_run,
        )
    def copy_tree(
        self,
        infile,
        outfile,
        preserve_mode=1,
        preserve_times=1,
        preserve_symlinks=0,
        level=1,
    ):
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        # Delegates to dir_util.copy_tree; 'not self.force' is the
        # positional argument after 'preserve_symlinks' (NOTE(review):
        # presumably its 'update' parameter -- confirm), and 'level' is
        # accepted for compatibility but not forwarded.
        return dir_util.copy_tree(
            infile,
            outfile,
            preserve_mode,
            preserve_times,
            preserve_symlinks,
            not self.force,
            dry_run=self.dry_run,
        )
    def move_file(self, src, dst, level=1):
        """Move a file respecting dry-run flag."""
        # Delegates to file_util.move_file; 'level' is accepted for
        # compatibility but not forwarded.
        return file_util.move_file(src, dst, dry_run=self.dry_run)
    def spawn(self, cmd, search_path=1, level=1):
        """Spawn an external command respecting dry-run flag."""
        from distutils.spawn import spawn

        # 'level' is accepted for compatibility but not forwarded.
        spawn(cmd, search_path, dry_run=self.dry_run)
    def make_archive(
        self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None
    ):
        """Create an archive via distutils.archive_util.make_archive,
        honoring this command's 'dry_run' flag, and return its result
        (NOTE(review): presumably the archive's filename -- confirm in
        archive_util)."""
        return archive_util.make_archive(
            base_name,
            format,
            root_dir,
            base_dir,
            dry_run=self.dry_run,
            owner=owner,
            group=group,
        )
    def make_file(
        self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1
    ):
        """Special case of 'execute()' for operations that process one or
        more input files and generate one output file.

        Works just like 'execute()', except the operation is skipped
        (and 'skip_msg' logged) when 'outfile' already exists and is
        newer than every file in 'infiles'.  A true 'self.force'
        disables the timestamp check and always runs the command.
        """
        # Accept a lone filename as well as a list/tuple of them.
        if isinstance(infiles, str):
            infiles = (infiles,)
        elif not isinstance(infiles, (list, tuple)):
            raise TypeError("'infiles' must be a string, or a list or tuple of strings")

        if skip_msg is None:
            skip_msg = "skipping %s (inputs unchanged)" % outfile
        if exec_msg is None:
            exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles))

        # Regenerate when forced or when any input is newer than the
        # target; otherwise just report the skip.
        if self.force or dep_util.newer_group(infiles, outfile):
            self.execute(func, args, exec_msg, level)
        else:
            log.debug(skip_msg)
@@ -0,0 +1,25 @@
 | 
			
		||||
"""distutils.command
 | 
			
		||||
 | 
			
		||||
Package containing implementation of all the standard Distutils
 | 
			
		||||
commands."""
 | 
			
		||||
 | 
			
		||||
__all__ = [  # noqa: F822
 | 
			
		||||
    'build',
 | 
			
		||||
    'build_py',
 | 
			
		||||
    'build_ext',
 | 
			
		||||
    'build_clib',
 | 
			
		||||
    'build_scripts',
 | 
			
		||||
    'clean',
 | 
			
		||||
    'install',
 | 
			
		||||
    'install_lib',
 | 
			
		||||
    'install_headers',
 | 
			
		||||
    'install_scripts',
 | 
			
		||||
    'install_data',
 | 
			
		||||
    'sdist',
 | 
			
		||||
    'register',
 | 
			
		||||
    'bdist',
 | 
			
		||||
    'bdist_dumb',
 | 
			
		||||
    'bdist_rpm',
 | 
			
		||||
    'check',
 | 
			
		||||
    'upload',
 | 
			
		||||
]
 | 
			
		||||
@@ -0,0 +1,55 @@
 | 
			
		||||
"""
 | 
			
		||||
Backward compatibility for homebrew builds on macOS.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
import functools
 | 
			
		||||
import subprocess
 | 
			
		||||
import sysconfig
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def enabled():
    """
    Only enabled for Python 3.9 framework homebrew builds
    except ensurepip and venv.
    """
    py39 = (3, 9) < sys.version_info < (3, 10)
    framework_build = sys.platform == 'darwin' and sys._framework
    homebrew_build = "Cellar" in sysconfig.get_config_var('projectbase')
    in_venv = sys.prefix != sys.base_prefix
    under_ensurepip = os.environ.get("ENSUREPIP_OPTIONS")
    return (
        py39
        and framework_build
        and homebrew_build
        and not in_venv
        and not under_ensurepip
    )
 | 
			
		||||
 | 
			
		||||
# Replacement install-scheme paths for framework homebrew builds:
# purelib/platlib, scripts and data are redirected under
# '{homebrew_prefix}', while the stdlib and include paths stay with the
# Python installation itself.
schemes = dict(
    osx_framework_library=dict(
        stdlib='{installed_base}/{platlibdir}/python{py_version_short}',
        platstdlib='{platbase}/{platlibdir}/python{py_version_short}',
        purelib='{homebrew_prefix}/lib/python{py_version_short}/site-packages',
        platlib='{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages',
        include='{installed_base}/include/python{py_version_short}{abiflags}',
        platinclude='{installed_platbase}/include/python{py_version_short}{abiflags}',
        scripts='{homebrew_prefix}/bin',
        data='{homebrew_prefix}',
    )
)
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def vars():
    """Return the extra scheme variables ('homebrew_prefix', obtained by
    running 'brew --prefix') when the workaround is enabled, else an
    empty dict.  Cached, so 'brew' is invoked at most once."""
    if not enabled():
        return {}
    prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
    return {'homebrew_prefix': prefix}
 | 
			
		||||
 | 
			
		||||
def scheme(name):
    """
    Override the selected scheme for posix_prefix.
    """
    if enabled() and name.endswith('_prefix'):
        return 'osx_framework_library'
    return name
@@ -0,0 +1,157 @@
 | 
			
		||||
"""distutils.command.bdist
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'bdist' command (create a built [binary]
 | 
			
		||||
distribution)."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsPlatformError, DistutilsOptionError
 | 
			
		||||
from ..util import get_platform
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_formats():
    """Print list of available formats (arguments to "--format" option)."""
    from ..fancy_getopt import FancyGetopt

    # One pseudo-option row per entry of bdist.format_commands, using the
    # entry's description as the help text.
    format_rows = [
        ("formats=" + name, None, bdist.format_commands[name][1])
        for name in bdist.format_commands
    ]
    FancyGetopt(format_rows).print_help("List of available distribution formats:")
class ListCompat(dict):
    """dict subclass that tolerates the legacy list-style 'append' call
    made by Setuptools against 'format_commands'."""

    def append(self, item):
        # The call is ignored apart from the deprecation warning; 'item'
        # is not stored.
        warnings.warn(
            """format_commands is now a dict. append is deprecated.""",
            DeprecationWarning,
            stacklevel=2,
        )
class bdist(Command):
    """Create a built (binary) distribution by dispatching to one
    format-specific bdist_* sub-command per requested format."""

    description = "create a built (binary) distribution"

    user_options = [
        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        ('formats=', None, "formats for distribution (comma-separated list)"),
        (
            'dist-dir=',
            'd',
            "directory to put final built distributions in " "[default: dist]",
        ),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file" " [default: current group]",
        ),
    ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None, "lists available distribution formats", show_formats),
    ]

    # The following commands do not take a format option from bdist
    no_format_option = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    # Define commands in preferred order for the --help-formats option
    # (format name -> (implementing command, description)).
    format_commands = ListCompat(
        {
            'rpm': ('bdist_rpm', "RPM distribution"),
            'gztar': ('bdist_dumb', "gzip'ed tar file"),
            'bztar': ('bdist_dumb', "bzip2'ed tar file"),
            'xztar': ('bdist_dumb', "xz'ed tar file"),
            'ztar': ('bdist_dumb', "compressed tar file"),
            'tar': ('bdist_dumb', "tar file"),
            'zip': ('bdist_dumb', "ZIP file"),
        }
    )

    # for compatibility until consumers only reference format_commands
    format_command = format_commands

    def initialize_options(self):
        """Set every option to its 'undefined' default so
        finalize_options() can tell which were supplied."""
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = 0
        self.group = None
        self.owner = None

    def finalize_options(self):
        """Resolve defaults for plat_name, bdist_base, formats and
        dist_dir; raises DistutilsPlatformError when no default format
        exists for this os.name."""
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name
                )

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        """Map each requested format to its sub-command and run them in
        order; raises DistutilsOptionError on an unknown format."""
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_commands[format][0])
            except KeyError:
                raise DistutilsOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i in range(len(self.formats)):
            cmd_name = commands[i]
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = self.formats[i]

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1 :]:
                sub_cmd.keep_temp = 1
            self.run_command(cmd_name)
@@ -0,0 +1,144 @@
 | 
			
		||||
"""distutils.command.bdist_dumb
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
 | 
			
		||||
distribution -- i.e., just an archive to be unpacked under $prefix or
 | 
			
		||||
$exec_prefix)."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..util import get_platform
 | 
			
		||||
from ..dir_util import remove_tree, ensure_relative
 | 
			
		||||
from ..errors import DistutilsPlatformError
 | 
			
		||||
from ..sysconfig import get_python_version
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class bdist_dumb(Command):
    """Create a "dumb" built distribution: install into a temporary tree
    and archive it, to be unpacked under $prefix or $exec_prefix."""

    description = "create a \"dumb\" built distribution"

    user_options = [
        ('bdist-dir=', 'd', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        (
            'format=',
            'f',
            "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)",
        ),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after "
            + "creating the distribution archive",
        ),
        # NOTE(review): 'dist-dir=' reuses the short option 'd' already
        # taken by 'bdist-dir=' above -- confirm this is intended.
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'relative',
            None,
            "build the archive using relative paths " "(default: false)",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file" " [default: current group]",
        ),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    default_format = {'posix': 'gztar', 'nt': 'zip'}

    def initialize_options(self):
        """Set every option to its 'undefined' default.  'skip_build' is
        left None so finalize_options() can inherit it from 'bdist'."""
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = None
        self.relative = 0
        self.owner = None
        self.group = None

    def finalize_options(self):
        """Resolve bdist_dir and format defaults, then inherit dist_dir,
        plat_name and skip_build from the 'bdist' command; raises
        DistutilsPlatformError when no default format exists for this
        os.name."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    "on platform %s" % os.name
                )

        self.set_undefined_options(
            'bdist',
            ('dist_dir', 'dist_dir'),
            ('plat_name', 'plat_name'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        """Build (unless skipped), install into the temporary tree,
        archive it into dist_dir, record the archive in
        distribution.dist_files, and optionally clean up."""
        if not self.skip_build:
            self.run_command('build')

        # Redirect the 'install' command into the pseudo-installation tree.
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = "{}.{}".format(
            self.distribution.get_fullname(), self.plat_name
        )

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            if self.distribution.has_ext_modules() and (
                install.install_base != install.install_platbase
            ):
                raise DistutilsPlatformError(
                    "can't make a dumb built distribution where "
                    "base and platbase are different (%s, %s)"
                    % (repr(install.install_base), repr(install.install_platbase))
                )
            else:
                archive_root = os.path.join(
                    self.bdist_dir, ensure_relative(install.install_base)
                )

        # Make the archive
        filename = self.make_archive(
            pseudoinstall_root,
            self.format,
            root_dir=archive_root,
            owner=self.owner,
            group=self.group,
        )
        # Tag the archive with the Python version when it contains
        # extension modules, else mark it version-independent.
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
@@ -0,0 +1,615 @@
 | 
			
		||||
"""distutils.command.bdist_rpm
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
 | 
			
		||||
distributions)."""
 | 
			
		||||
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..debug import DEBUG
 | 
			
		||||
from ..file_util import write_file
 | 
			
		||||
from ..errors import (
 | 
			
		||||
    DistutilsOptionError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
    DistutilsFileError,
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
)
 | 
			
		||||
from ..sysconfig import get_python_version
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class bdist_rpm(Command):
    """Create an RPM (source and/or binary) distribution.

    Writes a .spec file from the distribution meta-data and the options
    below, builds a source distribution, and drives ``rpmbuild`` over it.
    """

    description = "create an RPM distribution"

    # (long option, short option, help text) triples understood by the
    # distutils option parser; options ending in '=' take an argument.
    user_options = [
        ('bdist-base=', None, "base directory for creating built distributions"),
        (
            'rpm-base=',
            None,
            "base directory for creating RPMs (defaults to \"rpm\" under "
            "--bdist-base; must be specified for RPM 2)",
        ),
        (
            'dist-dir=',
            'd',
            "directory to put final RPM files in " "(and .spec files if --spec-only)",
        ),
        (
            'python=',
            None,
            "path to Python interpreter to hard-code in the .spec file "
            "(default: \"python\")",
        ),
        (
            'fix-python',
            None,
            "hard-code the exact path to the current Python interpreter in "
            "the .spec file",
        ),
        ('spec-only', None, "only regenerate spec file"),
        ('source-only', None, "only generate source RPM"),
        ('binary-only', None, "only generate binary RPM"),
        ('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"),
        # More meta-data: too RPM-specific to put in the setup script,
        # but needs to go in the .spec file -- so we make these options
        # to "bdist_rpm".  The idea is that packagers would put this
        # info in setup.cfg, although they are of course free to
        # supply it on the command line.
        (
            'distribution-name=',
            None,
            "name of the (Linux) distribution to which this "
            "RPM applies (*not* the name of the module distribution!)",
        ),
        ('group=', None, "package classification [default: \"Development/Libraries\"]"),
        ('release=', None, "RPM release number"),
        ('serial=', None, "RPM serial number"),
        (
            'vendor=',
            None,
            "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
            "[default: maintainer or author from setup script]",
        ),
        (
            'packager=',
            None,
            "RPM packager (eg. \"Jane Doe <jane@example.net>\") " "[default: vendor]",
        ),
        ('doc-files=', None, "list of documentation files (space or comma-separated)"),
        ('changelog=', None, "RPM changelog"),
        ('icon=', None, "name of icon file"),
        ('provides=', None, "capabilities provided by this package"),
        ('requires=', None, "capabilities required by this package"),
        ('conflicts=', None, "capabilities which conflict with this package"),
        ('build-requires=', None, "capabilities required to build this package"),
        ('obsoletes=', None, "capabilities made obsolete by this package"),
        ('no-autoreq', None, "do not automatically calculate dependencies"),
        # Actions to take when building RPM
        ('keep-temp', 'k', "don't clean up RPM build directory"),
        ('no-keep-temp', None, "clean up RPM build directory [default]"),
        (
            'use-rpm-opt-flags',
            None,
            "compile with RPM_OPT_FLAGS when building from source RPM",
        ),
        ('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"),
        ('rpm3-mode', None, "RPM 3 compatibility mode (default)"),
        ('rpm2-mode', None, "RPM 2 compatibility mode"),
        # Add the hooks necessary for specifying custom scripts
        ('prep-script=', None, "Specify a script for the PREP phase of RPM building"),
        ('build-script=', None, "Specify a script for the BUILD phase of RPM building"),
        (
            'pre-install=',
            None,
            "Specify a script for the pre-INSTALL phase of RPM building",
        ),
        (
            'install-script=',
            None,
            "Specify a script for the INSTALL phase of RPM building",
        ),
        (
            'post-install=',
            None,
            "Specify a script for the post-INSTALL phase of RPM building",
        ),
        (
            'pre-uninstall=',
            None,
            "Specify a script for the pre-UNINSTALL phase of RPM building",
        ),
        (
            'post-uninstall=',
            None,
            "Specify a script for the post-UNINSTALL phase of RPM building",
        ),
        ('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"),
        (
            'verify-script=',
            None,
            "Specify a script for the VERIFY phase of the RPM build",
        ),
        # Allow a packager to explicitly force an architecture
        ('force-arch=', None, "Force an architecture onto the RPM build process"),
        ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
    ]

    # Flag options (no argument); negated by entries in negative_opt below.
    boolean_options = [
        'keep-temp',
        'use-rpm-opt-flags',
        'rpm3-mode',
        'no-autoreq',
        'quiet',
    ]

    # Maps each "no-X" option to the boolean option it turns off.
    negative_opt = {
        'no-keep-temp': 'keep-temp',
        'no-rpm-opt-flags': 'use-rpm-opt-flags',
        'rpm2-mode': 'rpm3-mode',
    }
    def initialize_options(self):
        """Set every user option to its pre-parse default value."""
        # Options that default to "unset"; they are resolved (or left as
        # None) in finalize_options()/finalize_package_data().
        for attr in (
            'bdist_base',
            'rpm_base',
            'dist_dir',
            'python',
            'fix_python',
            'spec_only',
            'binary_only',
            'source_only',
            'use_bzip2',
            'distribution_name',
            'group',
            'release',
            'serial',
            'vendor',
            'packager',
            'doc_files',
            'changelog',
            'icon',
            'prep_script',
            'build_script',
            'install_script',
            'clean_script',
            'verify_script',
            'pre_install',
            'post_install',
            'pre_uninstall',
            'post_uninstall',
            'prep',
            'provides',
            'requires',
            'conflicts',
            'build_requires',
            'obsoletes',
            'force_arch',
        ):
            setattr(self, attr, None)

        # Boolean build-behaviour flags with concrete defaults.
        self.keep_temp = 0
        self.use_rpm_opt_flags = 1
        self.rpm3_mode = 1
        self.no_autoreq = 0
        self.quiet = 0
    def finalize_options(self):
        """Resolve option defaults and validate option combinations.

        Raises DistutilsOptionError for conflicting options and
        DistutilsPlatformError on non-POSIX platforms.
        """
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))

        if self.rpm_base is None:
            # RPM 2 has no way to redirect its working tree, so the user
            # must point us at one explicitly.
            if not self.rpm3_mode:
                raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode")
            self.rpm_base = os.path.join(self.bdist_base, "rpm")

        if self.python is None:
            # --fix-python pins the exact interpreter running this build.
            self.python = sys.executable if self.fix_python else "python3"
        elif self.fix_python:
            raise DistutilsOptionError(
                "--python and --fix-python are mutually exclusive options"
            )

        if os.name != 'posix':
            raise DistutilsPlatformError(
                "don't know how to create RPM distributions on platform %s" % os.name
            )
        if self.binary_only and self.source_only:
            raise DistutilsOptionError(
                "cannot supply both '--source-only' and '--binary-only'"
            )

        # don't pass CFLAGS to pure python distributions
        if not self.distribution.has_ext_modules():
            self.use_rpm_opt_flags = 0

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        self.finalize_package_data()
    def finalize_package_data(self):
        """Fill in spec-file meta-data defaults and normalize option types."""
        self.ensure_string('group', "Development/Libraries")
        default_vendor = "%s <%s>" % (
            self.distribution.get_contact(),
            self.distribution.get_contact_email(),
        )
        self.ensure_string('vendor', default_vendor)
        self.ensure_string('packager')

        self.ensure_string_list('doc_files')
        if isinstance(self.doc_files, list):
            # Ship any top-level README alongside the explicitly listed docs.
            for readme in ('README', 'README.txt'):
                if os.path.exists(readme) and readme not in self.doc_files:
                    self.doc_files.append(readme)

        self.ensure_string('release', "1")
        self.ensure_string('serial')  # should it be an int?
        self.ensure_string('distribution_name')

        self.ensure_string('changelog')
        # Normalize the changelog text into the line list rpm expects.
        self.changelog = self._format_changelog(self.changelog)

        # The icon and every custom hook script option must be a filename.
        for filename_attr in (
            'icon',
            'prep_script',
            'build_script',
            'install_script',
            'clean_script',
            'verify_script',
            'pre_install',
            'post_install',
            'pre_uninstall',
            'post_uninstall',
        ):
            self.ensure_filename(filename_attr)

        # XXX don't forget we punted on summaries and descriptions -- they
        # should be handled here eventually!

        # Now *this* is some meta-data that belongs in the setup script...
        for list_attr in (
            'provides',
            'requires',
            'conflicts',
            'build_requires',
            'obsoletes',
        ):
            self.ensure_string_list(list_attr)

        self.ensure_string('force_arch')
    def run(self):  # noqa: C901
        """Write the spec file, build the sdist, and invoke rpmbuild.

        With --spec-only, only the .spec file is written into dist_dir and
        the method returns early.  Otherwise an sdist tarball is produced,
        copied into the rpm SOURCES tree, rpmbuild is spawned, and the
        resulting RPM files are moved into dist_dir and recorded in
        ``self.distribution.dist_files``.
        """
        if DEBUG:
            print("before _get_package_data():")
            print("vendor =", self.vendor)
            print("packager =", self.packager)
            print("doc_files =", self.doc_files)
            print("changelog =", self.changelog)

        # make directories
        if self.spec_only:
            spec_dir = self.dist_dir
            self.mkpath(spec_dir)
        else:
            rpm_dir = {}
            for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
                rpm_dir[d] = os.path.join(self.rpm_base, d)
                self.mkpath(rpm_dir[d])
            spec_dir = rpm_dir['SPECS']

        # Spec file goes into 'dist_dir' if '--spec-only specified',
        # build/rpm.<plat> otherwise.
        spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
        self.execute(
            write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
        )

        if self.spec_only:  # stop if requested
            return

        # Make a source distribution and copy to SOURCES directory with
        # optional icon.
        # Save/restore dist_files so the sdist run doesn't register its
        # own tarball as a distribution artifact of this command.
        saved_dist_files = self.distribution.dist_files[:]
        sdist = self.reinitialize_command('sdist')
        if self.use_bzip2:
            sdist.formats = ['bztar']
        else:
            sdist.formats = ['gztar']
        self.run_command('sdist')
        self.distribution.dist_files = saved_dist_files

        source = sdist.get_archive_files()[0]
        source_dir = rpm_dir['SOURCES']
        self.copy_file(source, source_dir)

        if self.icon:
            if os.path.exists(self.icon):
                self.copy_file(self.icon, source_dir)
            else:
                raise DistutilsFileError("icon file '%s' does not exist" % self.icon)

        # build package
        log.info("building RPMs")
        rpm_cmd = ['rpmbuild']

        if self.source_only:  # what kind of RPMs?
            rpm_cmd.append('-bs')
        elif self.binary_only:
            rpm_cmd.append('-bb')
        else:
            rpm_cmd.append('-ba')
        rpm_cmd.extend(['--define', '__python %s' % self.python])
        if self.rpm3_mode:
            rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
        if not self.keep_temp:
            rpm_cmd.append('--clean')

        if self.quiet:
            rpm_cmd.append('--quiet')

        rpm_cmd.append(spec_path)
        # Determine the binary rpm names that should be built out of this spec
        # file
        # Note that some of these may not be really built (if the file
        # list is empty)
        nvr_string = "%{name}-%{version}-%{release}"
        src_rpm = nvr_string + ".src.rpm"
        non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
        # Ask rpm itself to expand the macros; each output line is
        # "<source-rpm-name> <binary-rpm-name>".
        q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format(
            src_rpm,
            non_src_rpm,
            spec_path,
        )

        out = os.popen(q_cmd)
        try:
            binary_rpms = []
            source_rpm = None
            while True:
                line = out.readline()
                if not line:
                    break
                ell = line.strip().split()
                assert len(ell) == 2
                binary_rpms.append(ell[1])
                # The source rpm is named after the first entry in the spec file
                if source_rpm is None:
                    source_rpm = ell[0]

            status = out.close()
            if status:
                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))

        finally:
            # NOTE(review): on the success path out.close() was already
            # called above, making this second close redundant — confirm
            # before simplifying.
            out.close()

        self.spawn(rpm_cmd)

        if not self.dry_run:
            if self.distribution.has_ext_modules():
                pyversion = get_python_version()
            else:
                pyversion = 'any'

            if not self.binary_only:
                srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
                assert os.path.exists(srpm)
                self.move_file(srpm, self.dist_dir)
                filename = os.path.join(self.dist_dir, source_rpm)
                self.distribution.dist_files.append(('bdist_rpm', pyversion, filename))

            if not self.source_only:
                for rpm in binary_rpms:
                    rpm = os.path.join(rpm_dir['RPMS'], rpm)
                    if os.path.exists(rpm):
                        self.move_file(rpm, self.dist_dir)
                        filename = os.path.join(self.dist_dir, os.path.basename(rpm))
                        self.distribution.dist_files.append(
                            ('bdist_rpm', pyversion, filename)
                        )
    def _dist_path(self, path):
        """Return *path*'s basename relocated into the distribution dir."""
        basename = os.path.basename(path)
        return os.path.join(self.dist_dir, basename)
    def _make_spec_file(self):  # noqa: C901
        """Generate the text of an RPM spec file and return it as a
        list of strings (one per line).
        """
        # definitions and headers
        spec_file = [
            '%define name ' + self.distribution.get_name(),
            # RPM does not allow dashes in version/release fields.
            '%define version ' + self.distribution.get_version().replace('-', '_'),
            '%define unmangled_version ' + self.distribution.get_version(),
            '%define release ' + self.release.replace('-', '_'),
            '',
            'Summary: ' + (self.distribution.get_description() or "UNKNOWN"),
        ]

        # Workaround for #14443 which affects some RPM based systems such as
        # RHEL6 (and probably derivatives)
        vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
        # Generate a potential replacement value for __os_install_post (whilst
        # normalizing the whitespace to simplify the test for whether the
        # invocation of brp-python-bytecompile passes in __python):
        vendor_hook = '\n'.join(
            ['  %s \\' % line.strip() for line in vendor_hook.splitlines()]
        )
        problem = "brp-python-bytecompile \\\n"
        fixed = "brp-python-bytecompile %{__python} \\\n"
        fixed_hook = vendor_hook.replace(problem, fixed)
        if fixed_hook != vendor_hook:
            spec_file.append('# Workaround for http://bugs.python.org/issue14443')
            spec_file.append('%define __os_install_post ' + fixed_hook + '\n')

        # put locale summaries into spec file
        # XXX not supported for now (hard to put a dictionary
        # in a config file -- arg!)
        # for locale in self.summaries.keys():
        #    spec_file.append('Summary(%s): %s' % (locale,
        #                                          self.summaries[locale]))

        spec_file.extend(
            [
                'Name: %{name}',
                'Version: %{version}',
                'Release: %{release}',
            ]
        )

        # XXX yuck! this filename is available from the "sdist" command,
        # but only after it has run: and we create the spec file before
        # running "sdist", in case of --spec-only.
        if self.use_bzip2:
            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
        else:
            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')

        spec_file.extend(
            [
                'License: ' + (self.distribution.get_license() or "UNKNOWN"),
                'Group: ' + self.group,
                'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
                'Prefix: %{_prefix}',
            ]
        )

        if not self.force_arch:
            # noarch if no extension modules
            if not self.distribution.has_ext_modules():
                spec_file.append('BuildArch: noarch')
        else:
            spec_file.append('BuildArch: %s' % self.force_arch)

        # Optional single-value or list-valued header fields; the option
        # attribute has the same name as the RPM field, lower-cased.
        for field in (
            'Vendor',
            'Packager',
            'Provides',
            'Requires',
            'Conflicts',
            'Obsoletes',
        ):
            val = getattr(self, field.lower())
            if isinstance(val, list):
                spec_file.append('{}: {}'.format(field, ' '.join(val)))
            elif val is not None:
                spec_file.append('{}: {}'.format(field, val))

        if self.distribution.get_url():
            spec_file.append('Url: ' + self.distribution.get_url())

        if self.distribution_name:
            spec_file.append('Distribution: ' + self.distribution_name)

        if self.build_requires:
            spec_file.append('BuildRequires: ' + ' '.join(self.build_requires))

        if self.icon:
            spec_file.append('Icon: ' + os.path.basename(self.icon))

        if self.no_autoreq:
            spec_file.append('AutoReq: 0')

        spec_file.extend(
            [
                '',
                '%description',
                self.distribution.get_long_description() or "",
            ]
        )

        # put locale descriptions into spec file
        # XXX again, suppressed because config file syntax doesn't
        # easily support this ;-(
        # for locale in self.descriptions.keys():
        #    spec_file.extend([
        #        '',
        #        '%description -l ' + locale,
        #        self.descriptions[locale],
        #        ])

        # rpm scripts
        # figure out default build script
        def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0]))
        def_build = "%s build" % def_setup_call
        if self.use_rpm_opt_flags:
            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

        # insert contents of files

        # XXX this is kind of misleading: user-supplied options are files
        # that we open and interpolate into the spec file, but the defaults
        # are just text that we drop in as-is.  Hmmm.

        install_cmd = (
            '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
        ) % def_setup_call

        # (rpm section name, option attribute holding a script file, default
        # script body used when the option is unset).
        script_options = [
            ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
            ('build', 'build_script', def_build),
            ('install', 'install_script', install_cmd),
            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
            ('verifyscript', 'verify_script', None),
            ('pre', 'pre_install', None),
            ('post', 'post_install', None),
            ('preun', 'pre_uninstall', None),
            ('postun', 'post_uninstall', None),
        ]

        for (rpm_opt, attr, default) in script_options:
            # Insert contents of file referred to, if no file is referred to
            # use 'default' as contents of script
            val = getattr(self, attr)
            if val or default:
                spec_file.extend(
                    [
                        '',
                        '%' + rpm_opt,
                    ]
                )
                if val:
                    with open(val) as f:
                        spec_file.extend(f.read().split('\n'))
                else:
                    spec_file.append(default)

        # files section
        spec_file.extend(
            [
                '',
                '%files -f INSTALLED_FILES',
                '%defattr(-,root,root)',
            ]
        )

        if self.doc_files:
            spec_file.append('%doc ' + ' '.join(self.doc_files))

        if self.changelog:
            spec_file.extend(
                [
                    '',
                    '%changelog',
                ]
            )
            spec_file.extend(self.changelog)

        return spec_file
    def _format_changelog(self, changelog):
        """Format the changelog correctly and convert it to a list of strings.

        Entry headers (lines starting with '*') get a blank line inserted
        before them, item lines (starting with '-') are kept as-is, and
        continuation lines are indented by two spaces.

        :param changelog: raw changelog text from setup.cfg or the command
            line; a false value (None, '') is returned unchanged.
        :return: list of formatted changelog lines (or *changelog* itself
            when it is false).
        """
        if not changelog:
            return changelog
        new_changelog = []
        for line in changelog.strip().split('\n'):
            line = line.strip()
            # Skip blank interior lines: indexing line[0] below would
            # otherwise raise IndexError on an empty string.
            if not line:
                continue
            if line[0] == '*':
                new_changelog.extend(['', line])
            elif line[0] == '-':
                new_changelog.append(line)
            else:
                new_changelog.append('  ' + line)

        # strip trailing newline inserted by first changelog entry
        # (guard against a changelog that contained only whitespace).
        if new_changelog and not new_changelog[0]:
            del new_changelog[0]

        return new_changelog
@@ -0,0 +1,153 @@
 | 
			
		||||
"""distutils.command.build
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'build' command."""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsOptionError
 | 
			
		||||
from ..util import get_platform
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_compilers():
    """Print the list of available compiler types (delegates to ccompiler)."""
    # Import lazily and under an alias so the module-level name of this
    # function is not shadowed inside its own body.
    from ..ccompiler import show_compilers as _show_compilers

    _show_compilers()
			
		||||
 | 
			
		||||
class build(Command):
    """Top-level 'build' command.

    Does no compilation itself: it resolves the various build directory
    options, then dispatches to whichever of the build_* sub-commands
    (see ``sub_commands`` below) are relevant for this distribution.
    """

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b', "base directory for build library"),
        ('build-purelib=', None, "build directory for platform-neutral distributions"),
        ('build-platlib=', None, "build directory for platform-specific distributions"),
        (
            'build-lib=',
            None,
            # NOTE(review): help text is missing its closing ')' — matches
            # the upstream distutils typo; left untouched (runtime string).
            "build directory for all distribution (defaults to either "
            + "build-purelib or build-platlib",
        ),
        ('build-scripts=', None, "build directory for scripts"),
        ('build-temp=', 't', "temporary build directory"),
        (
            'plat-name=',
            'p',
            "platform name to build for, if supported "
            "(default: %s)" % get_platform(),
        ),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('debug', 'g', "compile extensions and libraries with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path (build.py)"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set pre-parse defaults; None means "derive in finalize_options"."""
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.plat_name = None
        self.debug = None
        self.force = 0
        self.executable = None
        self.parallel = None

    def finalize_options(self):  # noqa: C901
        """Derive every unset directory/option from build_base and platform.

        Raises DistutilsOptionError for --plat-name off Windows and for a
        non-integer --parallel value.
        """
        if self.plat_name is None:
            self.plat_name = get_platform()
        else:
            # plat-name only supported for windows (other platforms are
            # supported via ./configure flags, if at all).  Avoid misleading
            # other platforms.
            if os.name != 'nt':
                raise DistutilsOptionError(
                    "--plat-name only supported on Windows (try "
                    "using './configure --help' on your platform)"
                )

        # e.g. ".linux-x86_64-cpython-311" — keyed on interpreter cache tag
        plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag)

        # Make it so a normal interpreter and a --with-pydebug build don't
        # share the same build directories. Doing so confuses the build
        # process for C modules (gettotalrefcount only exists in debug builds)
        if hasattr(sys, 'gettotalrefcount'):
            plat_specifier += '-pydebug'

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory.  We only use one of
        # them for a given distribution, though --
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it, pick
        # one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.has_ext_modules():
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(
                self.build_base, 'scripts-%d.%d' % sys.version_info[:2]
            )

        if self.executable is None and sys.executable:
            self.executable = os.path.normpath(sys.executable)

        # --parallel arrives from the command line as a string
        if isinstance(self.parallel, str):
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise DistutilsOptionError("parallel should be an integer")

    def run(self):
        """Run the applicable sub-commands, in sub_commands order."""
        # Run all relevant sub-commands.  This will be some subset of:
        #  - build_py      - pure Python modules
        #  - build_clib    - standalone C libraries
        #  - build_ext     - Python extensions
        #  - build_scripts - (Python) scripts
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules(self):
        """True if the distribution has pure-Python modules to build."""
        return self.distribution.has_pure_modules()

    def has_c_libraries(self):
        """True if the distribution has standalone C libraries to build."""
        return self.distribution.has_c_libraries()

    def has_ext_modules(self):
        """True if the distribution has C/C++ extension modules to build."""
        return self.distribution.has_ext_modules()

    def has_scripts(self):
        """True if the distribution has scripts to build."""
        return self.distribution.has_scripts()

    # (command name, predicate) pairs consumed by get_sub_commands()
    sub_commands = [
        ('build_py', has_pure_modules),
        ('build_clib', has_c_libraries),
        ('build_ext', has_ext_modules),
        ('build_scripts', has_scripts),
    ]
 | 
			
		||||
@@ -0,0 +1,208 @@
 | 
			
		||||
"""distutils.command.build_clib
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'build_clib' command, to build a C/C++ library
 | 
			
		||||
that is included in the module distribution and needed by an extension
 | 
			
		||||
module."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# XXX this module has *lots* of code ripped-off quite transparently from
 | 
			
		||||
# build_ext.py -- not surprisingly really, as the work required to build
 | 
			
		||||
# a static library from a collection of C source files is not really all
 | 
			
		||||
# that different from what's required to build a shared object file from
 | 
			
		||||
# a collection of C source files.  Nevertheless, I haven't done the
 | 
			
		||||
# necessary refactoring to account for the overlap in code between the
 | 
			
		||||
# two modules, mainly because a number of subtle details changed in the
 | 
			
		||||
# cut 'n paste.  Sigh.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsSetupError
 | 
			
		||||
from ..sysconfig import customize_compiler
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_compilers():
    """Print the list of available compiler types to stdout."""
    # Import lazily and under an alias so this wrapper does not shadow
    # the real implementation in distutils.ccompiler.
    from ..ccompiler import show_compilers as _show_compilers

    _show_compilers()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class build_clib(Command):
    """Build static C/C++ libraries declared in the 'libraries' setup option,
    for linking into extension modules built by build_ext.
    """

    description = "build C/C++ libraries used by Python extensions"

    user_options = [
        ('build-clib=', 'b', "directory to build C/C++ libraries to"),
        ('build-temp=', 't', "directory to put temporary build by-products"),
        ('debug', 'g', "compile with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set pre-parse defaults; None means "derive in finalize_options"."""
        self.build_clib = None
        self.build_temp = None

        # List of libraries to build
        self.libraries = None

        # Compilation options for all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = 0
        self.compiler = None

    def finalize_options(self):
        """Inherit unset options from 'build' and validate the library list."""
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command.  This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options(
            'build',
            ('build_temp', 'build_clib'),
            ('build_temp', 'build_temp'),
            ('compiler', 'compiler'),
            ('debug', 'debug'),
            ('force', 'force'),
        )

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        # the command line delivers include-dirs as a single os.pathsep string
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self):
        """Compile and archive every configured library; no-op if none."""
        if not self.libraries:
            return

        # Yech -- this is cut 'n pasted from build_ext.py!
        from ..ccompiler import new_compiler

        self.compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        customize_compiler(self.compiler)

        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries):
        """Ensure that the list of libraries is valid.

        `library` is presumably provided as a command option 'libraries'.
        This method checks that it is a list of 2-tuples, where the tuples
        are (library_name, build_info_dict).

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(libraries, list):
            raise DistutilsSetupError("'libraries' option must be a list of tuples")

        for lib in libraries:
            # NOTE(review): 'and' here looks like it was meant to be 'or'
            # (as written, a 2-element *list* passes), and the message is
            # missing "be".  Both match upstream distutils; changing the
            # condition would reject inputs that currently work, so it is
            # deliberately left as-is.
            if not isinstance(lib, tuple) and len(lib) != 2:
                raise DistutilsSetupError("each element of 'libraries' must a 2-tuple")

            name, build_info = lib

            if not isinstance(name, str):
                raise DistutilsSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)"
                )

            if '/' in name or (os.sep != '/' and os.sep in name):
                raise DistutilsSetupError(
                    "bad library name '%s': "
                    "may not contain directory separators" % lib[0]
                )

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)"
                )

    def get_library_names(self):
        """Return the names of the libraries to build, or None if none."""
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None

        lib_names = []
        for (lib_name, build_info) in self.libraries:
            lib_names.append(lib_name)
        return lib_names

    def get_source_files(self):
        """Return every source filename across all libraries (for sdist).

        Raises DistutilsSetupError if any library lacks a valid 'sources'
        list in its build_info dict.
        """
        self.check_library_list(self.libraries)
        filenames = []
        for (lib_name, build_info) in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )

            filenames.extend(sources)
        return filenames

    def build_libraries(self, libraries):
        """Compile and create a static archive for each (name, build_info)."""
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
            )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(
                objects, lib_name, output_dir=self.build_clib, debug=self.debug
            )
 | 
			
		||||
@@ -0,0 +1,789 @@
 | 
			
		||||
"""distutils.command.build_ext
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'build_ext' command, for building extension
 | 
			
		||||
modules (currently limited to C extensions, should accommodate C++
 | 
			
		||||
extensions ASAP)."""
 | 
			
		||||
 | 
			
		||||
import contextlib
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import (
 | 
			
		||||
    DistutilsOptionError,
 | 
			
		||||
    DistutilsSetupError,
 | 
			
		||||
    CCompilerError,
 | 
			
		||||
    DistutilsError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
)
 | 
			
		||||
from ..sysconfig import customize_compiler, get_python_version
 | 
			
		||||
from ..sysconfig import get_config_h_filename
 | 
			
		||||
from ..dep_util import newer_group
 | 
			
		||||
from ..extension import Extension
 | 
			
		||||
from ..util import get_platform
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
from . import py37compat
 | 
			
		||||
 | 
			
		||||
from site import USER_BASE
 | 
			
		||||
 | 
			
		||||
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
# Note: the character classes are ASCII-only, so non-ASCII identifiers
# (legal Python module names) are rejected by this pattern.
extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_compilers():
    """Print the list of available compiler types to stdout."""
    # Import lazily and under an alias so this wrapper does not shadow
    # the real implementation in distutils.ccompiler.
    from ..ccompiler import show_compilers as _show_compilers

    _show_compilers()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class build_ext(Command):
 | 
			
		||||
 | 
			
		||||
    description = "build C/C++ extensions (compile/link to build directory)"
 | 
			
		||||
 | 
			
		||||
    # XXX thoughts on how to deal with complex command-line options like
 | 
			
		||||
    # these, i.e. how to make it so fancy_getopt can suck them off the
 | 
			
		||||
    # command line and make it look like setup.py defined the appropriate
 | 
			
		||||
    # lists of tuples of what-have-you.
 | 
			
		||||
    #   - each command needs a callback to process its command-line options
 | 
			
		||||
    #   - Command.__init__() needs access to its share of the whole
 | 
			
		||||
    #     command line (must ultimately come from
 | 
			
		||||
    #     Distribution.parse_command_line())
 | 
			
		||||
    #   - it then calls the current command class' option-parsing
 | 
			
		||||
    #     callback to deal with weird options like -D, which have to
 | 
			
		||||
    #     parse the option text and churn out some custom data
 | 
			
		||||
    #     structure
 | 
			
		||||
    #   - that data structure (in this case, a list of 2-tuples)
 | 
			
		||||
    #     will then be present in the command object by the time
 | 
			
		||||
    #     we get to finalize_options() (i.e. the constructor
 | 
			
		||||
    #     takes care of both command-line and client options
 | 
			
		||||
    #     in between initialize_options() and finalize_options())
 | 
			
		||||
 | 
			
		||||
    sep_by = " (separated by '%s')" % os.pathsep
 | 
			
		||||
    user_options = [
 | 
			
		||||
        ('build-lib=', 'b', "directory for compiled extension modules"),
 | 
			
		||||
        ('build-temp=', 't', "directory for temporary files (build by-products)"),
 | 
			
		||||
        (
 | 
			
		||||
            'plat-name=',
 | 
			
		||||
            'p',
 | 
			
		||||
            "platform name to cross-compile for, if supported "
 | 
			
		||||
            "(default: %s)" % get_platform(),
 | 
			
		||||
        ),
 | 
			
		||||
        (
 | 
			
		||||
            'inplace',
 | 
			
		||||
            'i',
 | 
			
		||||
            "ignore build-lib and put compiled extensions into the source "
 | 
			
		||||
            + "directory alongside your pure Python modules",
 | 
			
		||||
        ),
 | 
			
		||||
        (
 | 
			
		||||
            'include-dirs=',
 | 
			
		||||
            'I',
 | 
			
		||||
            "list of directories to search for header files" + sep_by,
 | 
			
		||||
        ),
 | 
			
		||||
        ('define=', 'D', "C preprocessor macros to define"),
 | 
			
		||||
        ('undef=', 'U', "C preprocessor macros to undefine"),
 | 
			
		||||
        ('libraries=', 'l', "external C libraries to link with"),
 | 
			
		||||
        (
 | 
			
		||||
            'library-dirs=',
 | 
			
		||||
            'L',
 | 
			
		||||
            "directories to search for external C libraries" + sep_by,
 | 
			
		||||
        ),
 | 
			
		||||
        ('rpath=', 'R', "directories to search for shared C libraries at runtime"),
 | 
			
		||||
        ('link-objects=', 'O', "extra explicit link objects to include in the link"),
 | 
			
		||||
        ('debug', 'g', "compile/link with debugging information"),
 | 
			
		||||
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
 | 
			
		||||
        ('compiler=', 'c', "specify the compiler type"),
 | 
			
		||||
        ('parallel=', 'j', "number of parallel build jobs"),
 | 
			
		||||
        ('swig-cpp', None, "make SWIG create C++ files (default is C)"),
 | 
			
		||||
        ('swig-opts=', None, "list of SWIG command line options"),
 | 
			
		||||
        ('swig=', None, "path to the SWIG executable"),
 | 
			
		||||
        ('user', None, "add user include, library and rpath"),
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
 | 
			
		||||
 | 
			
		||||
    help_options = [
 | 
			
		||||
        ('help-compiler', None, "list available compilers", show_compilers),
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    def initialize_options(self):
 | 
			
		||||
        self.extensions = None
 | 
			
		||||
        self.build_lib = None
 | 
			
		||||
        self.plat_name = None
 | 
			
		||||
        self.build_temp = None
 | 
			
		||||
        self.inplace = 0
 | 
			
		||||
        self.package = None
 | 
			
		||||
 | 
			
		||||
        self.include_dirs = None
 | 
			
		||||
        self.define = None
 | 
			
		||||
        self.undef = None
 | 
			
		||||
        self.libraries = None
 | 
			
		||||
        self.library_dirs = None
 | 
			
		||||
        self.rpath = None
 | 
			
		||||
        self.link_objects = None
 | 
			
		||||
        self.debug = None
 | 
			
		||||
        self.force = None
 | 
			
		||||
        self.compiler = None
 | 
			
		||||
        self.swig = None
 | 
			
		||||
        self.swig_cpp = None
 | 
			
		||||
        self.swig_opts = None
 | 
			
		||||
        self.user = None
 | 
			
		||||
        self.parallel = None
 | 
			
		||||
 | 
			
		||||
    def finalize_options(self):  # noqa: C901
 | 
			
		||||
        from distutils import sysconfig
 | 
			
		||||
 | 
			
		||||
        self.set_undefined_options(
 | 
			
		||||
            'build',
 | 
			
		||||
            ('build_lib', 'build_lib'),
 | 
			
		||||
            ('build_temp', 'build_temp'),
 | 
			
		||||
            ('compiler', 'compiler'),
 | 
			
		||||
            ('debug', 'debug'),
 | 
			
		||||
            ('force', 'force'),
 | 
			
		||||
            ('parallel', 'parallel'),
 | 
			
		||||
            ('plat_name', 'plat_name'),
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        if self.package is None:
 | 
			
		||||
            self.package = self.distribution.ext_package
 | 
			
		||||
 | 
			
		||||
        self.extensions = self.distribution.ext_modules
 | 
			
		||||
 | 
			
		||||
        # Make sure Python's include directories (for Python.h, pyconfig.h,
 | 
			
		||||
        # etc.) are in the include search path.
 | 
			
		||||
        py_include = sysconfig.get_python_inc()
 | 
			
		||||
        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
 | 
			
		||||
        if self.include_dirs is None:
 | 
			
		||||
            self.include_dirs = self.distribution.include_dirs or []
 | 
			
		||||
        if isinstance(self.include_dirs, str):
 | 
			
		||||
            self.include_dirs = self.include_dirs.split(os.pathsep)
 | 
			
		||||
 | 
			
		||||
        # If in a virtualenv, add its include directory
 | 
			
		||||
        # Issue 16116
 | 
			
		||||
        if sys.exec_prefix != sys.base_exec_prefix:
 | 
			
		||||
            self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
 | 
			
		||||
 | 
			
		||||
        # Put the Python "system" include dir at the end, so that
 | 
			
		||||
        # any local include dirs take precedence.
 | 
			
		||||
        self.include_dirs.extend(py_include.split(os.path.pathsep))
 | 
			
		||||
        if plat_py_include != py_include:
 | 
			
		||||
            self.include_dirs.extend(plat_py_include.split(os.path.pathsep))
 | 
			
		||||
 | 
			
		||||
        self.ensure_string_list('libraries')
 | 
			
		||||
        self.ensure_string_list('link_objects')
 | 
			
		||||
 | 
			
		||||
        # Life is easier if we're not forever checking for None, so
 | 
			
		||||
        # simplify these options to empty lists if unset
 | 
			
		||||
        if self.libraries is None:
 | 
			
		||||
            self.libraries = []
 | 
			
		||||
        if self.library_dirs is None:
 | 
			
		||||
            self.library_dirs = []
 | 
			
		||||
        elif isinstance(self.library_dirs, str):
 | 
			
		||||
            self.library_dirs = self.library_dirs.split(os.pathsep)
 | 
			
		||||
 | 
			
		||||
        if self.rpath is None:
 | 
			
		||||
            self.rpath = []
 | 
			
		||||
        elif isinstance(self.rpath, str):
 | 
			
		||||
            self.rpath = self.rpath.split(os.pathsep)
 | 
			
		||||
 | 
			
		||||
        # for extensions under windows use different directories
 | 
			
		||||
        # for Release and Debug builds.
 | 
			
		||||
        # also Python's library directory must be appended to library_dirs
 | 
			
		||||
        if os.name == 'nt':
 | 
			
		||||
            # the 'libs' directory is for binary installs - we assume that
 | 
			
		||||
            # must be the *native* platform.  But we don't really support
 | 
			
		||||
            # cross-compiling via a binary install anyway, so we let it go.
 | 
			
		||||
            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
 | 
			
		||||
            if sys.base_exec_prefix != sys.prefix:  # Issue 16116
 | 
			
		||||
                self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
 | 
			
		||||
            if self.debug:
 | 
			
		||||
                self.build_temp = os.path.join(self.build_temp, "Debug")
 | 
			
		||||
            else:
 | 
			
		||||
                self.build_temp = os.path.join(self.build_temp, "Release")
 | 
			
		||||
 | 
			
		||||
            # Append the source distribution include and library directories,
 | 
			
		||||
            # this allows distutils on windows to work in the source tree
 | 
			
		||||
            self.include_dirs.append(os.path.dirname(get_config_h_filename()))
 | 
			
		||||
            self.library_dirs.append(sys.base_exec_prefix)
 | 
			
		||||
 | 
			
		||||
            # Use the .lib files for the correct architecture
 | 
			
		||||
            if self.plat_name == 'win32':
 | 
			
		||||
                suffix = 'win32'
 | 
			
		||||
            else:
 | 
			
		||||
                # win-amd64
 | 
			
		||||
                suffix = self.plat_name[4:]
 | 
			
		||||
            new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
 | 
			
		||||
            if suffix:
 | 
			
		||||
                new_lib = os.path.join(new_lib, suffix)
 | 
			
		||||
            self.library_dirs.append(new_lib)
 | 
			
		||||
 | 
			
		||||
        # For extensions under Cygwin, Python's library directory must be
 | 
			
		||||
        # appended to library_dirs
 | 
			
		||||
        if sys.platform[:6] == 'cygwin':
 | 
			
		||||
            if not sysconfig.python_build:
 | 
			
		||||
                # building third party extensions
 | 
			
		||||
                self.library_dirs.append(
 | 
			
		||||
                    os.path.join(
 | 
			
		||||
                        sys.prefix, "lib", "python" + get_python_version(), "config"
 | 
			
		||||
                    )
 | 
			
		||||
                )
 | 
			
		||||
            else:
 | 
			
		||||
                # building python standard extensions
 | 
			
		||||
                self.library_dirs.append('.')
 | 
			
		||||
 | 
			
		||||
        # For building extensions with a shared Python library,
 | 
			
		||||
        # Python's library directory must be appended to library_dirs
 | 
			
		||||
        # See Issues: #1600860, #4366
 | 
			
		||||
        if sysconfig.get_config_var('Py_ENABLE_SHARED'):
 | 
			
		||||
            if not sysconfig.python_build:
 | 
			
		||||
                # building third party extensions
 | 
			
		||||
                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
 | 
			
		||||
            else:
 | 
			
		||||
                # building python standard extensions
 | 
			
		||||
                self.library_dirs.append('.')
 | 
			
		||||
 | 
			
		||||
        # The argument parsing will result in self.define being a string, but
 | 
			
		||||
        # it has to be a list of 2-tuples.  All the preprocessor symbols
 | 
			
		||||
        # specified by the 'define' option will be set to '1'.  Multiple
 | 
			
		||||
        # symbols can be separated with commas.
 | 
			
		||||
 | 
			
		||||
        if self.define:
 | 
			
		||||
            defines = self.define.split(',')
 | 
			
		||||
            self.define = [(symbol, '1') for symbol in defines]
 | 
			
		||||
 | 
			
		||||
        # The option for macros to undefine is also a string from the
 | 
			
		||||
        # option parsing, but has to be a list.  Multiple symbols can also
 | 
			
		||||
        # be separated with commas here.
 | 
			
		||||
        if self.undef:
 | 
			
		||||
            self.undef = self.undef.split(',')
 | 
			
		||||
 | 
			
		||||
        if self.swig_opts is None:
 | 
			
		||||
            self.swig_opts = []
 | 
			
		||||
        else:
 | 
			
		||||
            self.swig_opts = self.swig_opts.split(' ')
 | 
			
		||||
 | 
			
		||||
        # Finally add the user include and library directories if requested
 | 
			
		||||
        if self.user:
 | 
			
		||||
            user_include = os.path.join(USER_BASE, "include")
 | 
			
		||||
            user_lib = os.path.join(USER_BASE, "lib")
 | 
			
		||||
            if os.path.isdir(user_include):
 | 
			
		||||
                self.include_dirs.append(user_include)
 | 
			
		||||
            if os.path.isdir(user_lib):
 | 
			
		||||
                self.library_dirs.append(user_lib)
 | 
			
		||||
                self.rpath.append(user_lib)
 | 
			
		||||
 | 
			
		||||
        if isinstance(self.parallel, str):
 | 
			
		||||
            try:
 | 
			
		||||
                self.parallel = int(self.parallel)
 | 
			
		||||
            except ValueError:
 | 
			
		||||
                raise DistutilsOptionError("parallel should be an integer")
 | 
			
		||||
 | 
			
		||||
    def run(self):  # noqa: C901
        """Build every extension module listed in ``self.extensions``.

        Configures a :class:`CCompiler` with the include/define/library
        options collected during ``finalize_options`` and delegates the
        per-extension work to :meth:`build_extensions`.  Returns early if
        there are no extensions to build.
        """
        from ..ccompiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        #
        # For backwards compatibility with Distutils 0.8.2 and earlier, we
        # also allow the 'extensions' list to be a list of tuples:
        #    (ext_name, build_info)
        # where build_info is a dictionary containing everything that
        # Extension instances do except the name, with a few things being
        # differently named.  We convert these 2-tuples to Extension
        # instances as needed.

        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(
            compiler=self.compiler,
            verbose=self.verbose,
            dry_run=self.dry_run,
            force=self.force,
        )
        customize_compiler(self.compiler)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
 | 
			
		||||
 | 
			
		||||
    def check_extensions_list(self, extensions):  # noqa: C901
        """Ensure that the list of extensions (presumably provided as a
        command option 'extensions') is valid, i.e. it is a list of
        Extension objects.  We also support the old-style list of 2-tuples,
        where the tuples are (ext_name, build_info), which are converted to
        Extension instances here.

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.

        Note: mutates 'extensions' in place -- old-style tuple entries are
        replaced by the Extension instances built from them.
        """
        if not isinstance(extensions, list):
            raise DistutilsSetupError(
                "'ext_modules' option must be a list of Extension instances"
            )

        for i, ext in enumerate(extensions):
            if isinstance(ext, Extension):
                continue  # OK! (assume type-checking done
                # by Extension constructor)

            if not isinstance(ext, tuple) or len(ext) != 2:
                raise DistutilsSetupError(
                    "each element of 'ext_modules' option must be an "
                    "Extension instance or 2-tuple"
                )

            ext_name, build_info = ext

            log.warning(
                "old-style (ext_name, build_info) tuple found in "
                "ext_modules for extension '%s' "
                "-- please convert to Extension instance",
                ext_name,
            )

            if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
                raise DistutilsSetupError(
                    "first element of each tuple in 'ext_modules' "
                    "must be the extension name (a string)"
                )

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'ext_modules' "
                    "must be a dictionary (build info)"
                )

            # OK, the (ext_name, build_info) dict is type-safe: convert it
            # to an Extension instance.
            ext = Extension(ext_name, build_info['sources'])

            # Easy stuff: one-to-one mapping from dict elements to
            # instance attributes.
            for key in (
                'include_dirs',
                'library_dirs',
                'libraries',
                'extra_objects',
                'extra_compile_args',
                'extra_link_args',
            ):
                val = build_info.get(key)
                if val is not None:
                    setattr(ext, key, val)

            # Medium-easy stuff: same syntax/semantics, different names.
            ext.runtime_library_dirs = build_info.get('rpath')
            if 'def_file' in build_info:
                log.warning(
                    "'def_file' element of build info dict " "no longer supported"
                )

            # Non-trivial stuff: 'macros' split into 'define_macros'
            # and 'undef_macros'.
            macros = build_info.get('macros')
            if macros:
                ext.define_macros = []
                ext.undef_macros = []
                for macro in macros:
                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                        raise DistutilsSetupError(
                            "'macros' element of build info dict "
                            "must be 1- or 2-tuple"
                        )
                    # A 1-tuple means "undefine", a 2-tuple means "define".
                    if len(macro) == 1:
                        ext.undef_macros.append(macro[0])
                    elif len(macro) == 2:
                        ext.define_macros.append(macro)

            extensions[i] = ext
 | 
			
		||||
 | 
			
		||||
    def get_source_files(self):
        """Return the source filenames of every configured extension."""
        # Validate (and possibly normalize) the extension list first --
        # old-style 2-tuples are converted to Extension instances here.
        self.check_extensions_list(self.extensions)

        # Wouldn't it be neat if we knew the names of header files too...
        return [source for ext in self.extensions for source in ext.sources]
 | 
			
		||||
 | 
			
		||||
    def get_outputs(self):
        """Return the full path of every extension file this command builds.

        Ignores the 'inplace' flag and assumes everything goes into the
        "build" tree.
        """
        # Sanity check the 'extensions' list -- can't assume this is being
        # done in the same run as a 'build_extensions()' call (in fact, we
        # can probably assume that it *isn't*!).
        self.check_extensions_list(self.extensions)

        # Build the list of output (built) filenames.
        return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
 | 
			
		||||
 | 
			
		||||
    def build_extensions(self):
        """Compile and link all extensions, serially or in parallel."""
        # First, sanity-check the 'extensions' list.
        self.check_extensions_list(self.extensions)

        if not self.parallel:
            self._build_extensions_serial()
        else:
            self._build_extensions_parallel()
 | 
			
		||||
 | 
			
		||||
    def _build_extensions_parallel(self):
 | 
			
		||||
        workers = self.parallel
 | 
			
		||||
        if self.parallel is True:
 | 
			
		||||
            workers = os.cpu_count()  # may return None
 | 
			
		||||
        try:
 | 
			
		||||
            from concurrent.futures import ThreadPoolExecutor
 | 
			
		||||
        except ImportError:
 | 
			
		||||
            workers = None
 | 
			
		||||
 | 
			
		||||
        if workers is None:
 | 
			
		||||
            self._build_extensions_serial()
 | 
			
		||||
            return
 | 
			
		||||
 | 
			
		||||
        with ThreadPoolExecutor(max_workers=workers) as executor:
 | 
			
		||||
            futures = [
 | 
			
		||||
                executor.submit(self.build_extension, ext) for ext in self.extensions
 | 
			
		||||
            ]
 | 
			
		||||
            for ext, fut in zip(self.extensions, futures):
 | 
			
		||||
                with self._filter_build_errors(ext):
 | 
			
		||||
                    fut.result()
 | 
			
		||||
 | 
			
		||||
    def _build_extensions_serial(self):
 | 
			
		||||
        for ext in self.extensions:
 | 
			
		||||
            with self._filter_build_errors(ext):
 | 
			
		||||
                self.build_extension(ext)
 | 
			
		||||
 | 
			
		||||
    @contextlib.contextmanager
 | 
			
		||||
    def _filter_build_errors(self, ext):
 | 
			
		||||
        try:
 | 
			
		||||
            yield
 | 
			
		||||
        except (CCompilerError, DistutilsError, CompileError) as e:
 | 
			
		||||
            if not ext.optional:
 | 
			
		||||
                raise
 | 
			
		||||
            self.warn('building extension "{}" failed: {}'.format(ext.name, e))
 | 
			
		||||
 | 
			
		||||
    def build_extension(self, ext):
        """Compile and link one Extension into its final shared-object file.

        Validates 'ext.sources', skips the build when the target is newer
        than all sources and dependencies (unless --force), runs SWIG over
        any .i files, compiles, then links.
        """
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name
            )
        # sort to make the resulting .so file build reproducible
        sources = sorted(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            # A 1-tuple in the macros list means "undefine this macro"
            # to CCompiler.compile().
            macros.append((undef,))

        objects = self.compiler.compile(
            sources,
            output_dir=self.build_temp,
            macros=macros,
            include_dirs=ext.include_dirs,
            debug=self.debug,
            extra_postargs=extra_args,
            depends=ext.depends,
        )

        # XXX outdated variable, kept here in case third-part code
        # needs it.
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects,
            ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language,
        )
 | 
			
		||||
 | 
			
		||||
    def swig_sources(self, sources, extension):
        """Walk the list of source files in 'sources', looking for SWIG
        interface (.i) files.  Run SWIG on all that are found, and
        return a modified 'sources' list with SWIG source files replaced
        by the generated C (or C++) files.
        """
        new_sources = []
        swig_sources = []
        swig_targets = {}

        # XXX this drops generated C/C++ files into the source tree, which
        # is fine for developers who want to distribute the generated
        # source -- but there should be an option to put SWIG output in
        # the temp dir.

        if self.swig_cpp:
            log.warning("--swig-cpp is deprecated - use --swig-opts=-c++")

        # C++ mode if requested via the deprecated flag or either of the
        # command-level / extension-level SWIG option lists.
        cpp_mode = (
            self.swig_cpp
            or ('-c++' in self.swig_opts)
            or ('-c++' in extension.swig_opts)
        )
        target_ext = '.cpp' if cpp_mode else '.c'

        for source in sources:
            base, ext = os.path.splitext(source)
            if ext != ".i":
                new_sources.append(source)
                continue
            # SWIG interface file: substitute the generated wrapper file.
            wrapper = base + '_wrap' + target_ext
            new_sources.append(wrapper)
            swig_sources.append(source)
            swig_targets[source] = wrapper

        if not swig_sources:
            return new_sources

        swig = self.swig or self.find_swig()
        swig_cmd = [swig, "-python"]
        swig_cmd.extend(self.swig_opts)
        if self.swig_cpp:
            swig_cmd.append("-c++")

        # Do not override commandline arguments
        if not self.swig_opts:
            swig_cmd.extend(extension.swig_opts)

        for source in swig_sources:
            target = swig_targets[source]
            log.info("swigging %s to %s", source, target)
            self.spawn(swig_cmd + ["-o", target, source])

        return new_sources
 | 
			
		||||
 | 
			
		||||
    def find_swig(self):
        """Return the name of the SWIG executable.  On Unix, this is
        just "swig" -- it should be in the PATH.  Tries a bit harder on
        Windows.
        """
        if os.name == "posix":
            return "swig"
        if os.name == "nt":
            # Look for SWIG in its standard installation directory on
            # Windows (or so I presume!).  If we find it there, great;
            # if not, act like Unix and assume it's in the PATH.
            for version in ("1.3", "1.2", "1.1"):
                candidate = os.path.join("c:\\swig%s" % version, "swig.exe")
                if os.path.isfile(candidate):
                    return candidate
            return "swig.exe"
        raise DistutilsPlatformError(
            "I don't know how to find (much less run) SWIG "
            "on platform '%s'" % os.name
        )
 | 
			
		||||
 | 
			
		||||
    # -- Name generators -----------------------------------------------
 | 
			
		||||
    # (extension names, filenames, whatever)
 | 
			
		||||
    def get_ext_fullpath(self, ext_name):
        """Returns the path of the filename for a given extension.

        The file is located in `build_lib` or directly in the package
        (inplace option).
        """
        fullname = self.get_ext_fullname(ext_name)
        modpath = fullname.split('.')
        filename = self.get_ext_filename(modpath[-1])

        if self.inplace:
            # the inplace option requires finding the package directory;
            # use the build_py command for that
            package = '.'.join(modpath[0:-1])
            build_py = self.get_finalized_command('build_py')
            package_dir = os.path.abspath(build_py.get_package_dir(package))
            # returning: package_dir/filename
            return os.path.join(package_dir, filename)

        # no further work needed -- returning:
        #   build_dir/package/path/filename
        relative = os.path.join(*modpath[:-1] + [filename])
        return os.path.join(self.build_lib, relative)
 | 
			
		||||
 | 
			
		||||
    def get_ext_fullname(self, ext_name):
        """Returns the fullname of a given extension name.

        Adds the `package.` prefix"""
        return ext_name if self.package is None else self.package + '.' + ext_name
 | 
			
		||||
 | 
			
		||||
    def get_ext_filename(self, ext_name):
        r"""Convert the name of an extension (eg. "foo.bar") into the name
        of the file from which it will be loaded (eg. "foo/bar.so", or
        "foo\bar.pyd").
        """
        from ..sysconfig import get_config_var

        # EXT_SUFFIX already includes the platform/ABI tag and the dot.
        suffix = get_config_var('EXT_SUFFIX')
        return os.path.join(*ext_name.split('.')) + suffix
 | 
			
		||||
 | 
			
		||||
    def get_export_symbols(self, ext):
        """Return the list of symbols that a shared extension has to
        export.  This either uses 'ext.export_symbols' or, if it's not
        provided, "PyInit_" + module_name.  Only relevant on Windows, where
        the .pyd file (DLL) must export the module "PyInit_" function.
        """
        name = ext.name.split('.')[-1]
        try:
            # Unicode module name support as defined in PEP-489
            # https://www.python.org/dev/peps/pep-0489/#export-hook-name
            name.encode('ascii')
        except UnicodeEncodeError:
            encoded = name.encode('punycode').replace(b'-', b'_').decode('ascii')
            initfunc_name = "PyInitU_" + encoded
        else:
            initfunc_name = "PyInit_" + name
        if initfunc_name not in ext.export_symbols:
            ext.export_symbols.append(initfunc_name)
        return ext.export_symbols
 | 
			
		||||
 | 
			
		||||
    def get_libraries(self, ext):  # noqa: C901
        """Return the list of libraries to link against when building a
        shared extension.  On most platforms, this is just 'ext.libraries';
        on Windows, we add the Python library (eg. python20.dll).
        """
        # The python library is always needed on Windows.  For MSVC, this
        # is redundant, since the library is mentioned in a pragma in
        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
        # to need it mentioned explicitly, though, so that's what we do.
        # Append '_d' to the python import library on debug builds.
        if sys.platform == "win32":
            from .._msvccompiler import MSVCCompiler

            if not isinstance(self.compiler, MSVCCompiler):
                template = "python%d%d"
                if self.debug:
                    template = template + '_d'
                # Major/minor version pulled from sys.hexversion
                # (e.g. 0x030b00f0 -> "python311").
                pythonlib = template % (
                    sys.hexversion >> 24,
                    (sys.hexversion >> 16) & 0xFF,
                )
                # don't extend ext.libraries, it may be shared with other
                # extensions, it is a reference to the original list
                return ext.libraries + [pythonlib]
        else:
            # On Android only the main executable and LD_PRELOADs are considered
            # to be RTLD_GLOBAL, all the dependencies of the main executable
            # remain RTLD_LOCAL and so the shared libraries must be linked with
            # libpython when python is built with a shared python library (issue
            # bpo-21536).
            # On Cygwin (and if required, other POSIX-like platforms based on
            # Windows like MinGW) it is simply necessary that all symbols in
            # shared libraries are resolved at link time.
            from ..sysconfig import get_config_var

            link_libpython = False
            if get_config_var('Py_ENABLE_SHARED'):
                # A native build on an Android device or on Cygwin
                if hasattr(sys, 'getandroidapilevel'):
                    link_libpython = True
                elif sys.platform == 'cygwin':
                    link_libpython = True
                elif '_PYTHON_HOST_PLATFORM' in os.environ:
                    # We are cross-compiling for one of the relevant platforms
                    if get_config_var('ANDROID_API_LEVEL') != 0:
                        link_libpython = True
                    elif get_config_var('MACHDEP') == 'cygwin':
                        link_libpython = True

            if link_libpython:
                ldversion = get_config_var('LDVERSION')
                return ext.libraries + ['python' + ldversion]

        # Fall-through: MSVC on win32 (the pyconfig.h pragma handles the
        # library), or POSIX without a shared libpython.
        return ext.libraries + py37compat.pythonlib()
 | 
			
		||||
@@ -0,0 +1,407 @@
 | 
			
		||||
"""distutils.command.build_py
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'build_py' command."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import importlib.util
 | 
			
		||||
import sys
 | 
			
		||||
import glob
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsOptionError, DistutilsFileError
 | 
			
		||||
from ..util import convert_path
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class build_py(Command):
    """Implement the Distutils 'build_py' command.

    Copies pure Python modules (and any declared package data files) from
    the source tree into the build directory, optionally byte-compiling
    the copied modules.
    """

    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
    ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        """Set default values for all the options this command supports."""
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        """Resolve options against the 'build' command and the distribution."""
        self.set_undefined_options(
            'build', ('build_lib', 'build_lib'), ('force', 'force')
        )

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Copy modules and package data to the build directory, then byte-compile."""
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir) + 1

            # Strip directory from globbed filenames
            filenames = [file[plen:] for file in self.find_data_files(package, src_dir)]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        # Patterns under the '' key apply to every package.
        globs = self.package_data.get('', []) + self.package_data.get(package, [])
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob.glob(
                os.path.join(glob.escape(src_dir), convert_path(pattern))
            )
            # Files that match more than one pattern are only added once
            files.extend(
                [fn for fn in filelist if fn not in files and os.path.isfile(fn)]
            )
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                # preserve_mode=False: see the mode discussion in run().
                self.copy_file(
                    os.path.join(src_dir, filename), target, preserve_mode=False
                )

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            # Walk up the dotted package name, looking for the longest
            # prefix that has an explicit entry in package_dir.
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        """Validate 'package_dir' and return the package's __init__.py
        path, or None for namespace packages / the root package."""
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                    "package directory '%s' does not exist" % package_dir
                )
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                    "supposed package directory '%s' exists, "
                    "but is not a directory" % package_dir
                )

        # Directories without __init__.py are namespace packages (PEP 420).
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        """Return True if 'module_file' exists; warn and return False otherwise."""
        if not os.path.isfile(module_file):
            log.warning("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        """Return (package, module, file) tuples for all .py files in
        'package_dir', excluding the setup script itself."""
        self.check_package(package, package_dir)
        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        """Return the list of source filenames for all modules to be built."""
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        """Return the output path for 'module' of 'package' under 'build_dir'.

        'package' must already be a sequence of path components (no dots).
        """
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        """Return the list of files this command would produce, optionally
        including the .pyc files implied by the compile/optimize options."""
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(
                        importlib.util.cache_from_source(filename, optimization='')
                    )
                if self.optimize > 0:
                    outputs.append(
                        importlib.util.cache_from_source(
                            filename, optimization=self.optimize
                        )
                    )

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
        ]

        return outputs

    def build_module(self, module, module_file, package):
        """Copy a single module source file into the build directory."""
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                "'package' must be a string (dot-separated), list, or tuple"
            )

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        """Copy every individually-listed module into the build directory."""
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        """Copy every module of every listed package into the build directory."""
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        """Byte-compile 'files' according to the compile/optimize options."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from ..util import byte_compile

        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(
                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run
            )
        if self.optimize > 0:
            byte_compile(
                files,
                optimize=self.optimize,
                force=self.force,
                prefix=prefix,
                dry_run=self.dry_run,
            )
 | 
			
		||||
@@ -0,0 +1,173 @@
 | 
			
		||||
"""distutils.command.build_scripts
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'build_scripts' command."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
from stat import ST_MODE
 | 
			
		||||
from distutils import sysconfig
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..dep_util import newer
 | 
			
		||||
from ..util import convert_path
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
import tokenize
 | 
			
		||||
 | 
			
		||||
# Group 1 captures anything after the interpreter (interpreter arguments),
# which _copy_script re-appends after rewriting the interpreter path.
shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
"""
Pattern matching a Python interpreter indicated in first line of a script.
"""

# for Setuptools compatibility
first_line_re = shebang_pattern
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class build_scripts(Command):
    """Implement the Distutils 'build_scripts' command.

    Copies each listed script into the build directory; scripts whose
    first line matches 'shebang_pattern' get that line rewritten to refer
    to the current (or configured) Python interpreter.
    """

    description = "\"build\" scripts (copy and fixup #! line)"

    user_options = [
        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
        # Fix: the help text was missing its closing parenthesis.
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set default values for all the options this command supports."""
        self.build_dir = None
        self.scripts = None
        self.force = None
        self.executable = None

    def finalize_options(self):
        """Resolve options against the 'build' command and the distribution."""
        self.set_undefined_options(
            'build',
            ('build_scripts', 'build_dir'),
            ('force', 'force'),
            ('executable', 'executable'),
        )
        self.scripts = self.distribution.scripts

    def get_source_files(self):
        """Return the list of script source filenames."""
        return self.scripts

    def run(self):
        """Copy the scripts, doing nothing if none are configured."""
        if not self.scripts:
            return
        self.copy_scripts()

    def copy_scripts(self):
        """
        Copy each script listed in ``self.scripts``.

        If a script is marked as a Python script (first line matches
        'shebang_pattern', i.e. starts with ``#!`` and contains
        "python"), then adjust in the copy the first line to refer to
        the current Python interpreter.
        """
        self.mkpath(self.build_dir)
        outfiles = []
        updated_files = []
        for script in self.scripts:
            self._copy_script(script, outfiles, updated_files)

        self._change_modes(outfiles)

        return outfiles, updated_files

    def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
        """Copy one script, rewriting its shebang line if it has one.

        Appends the output path to 'outfiles' always, and to
        'updated_files' only when the copy actually happens.
        """
        shebang_match = None
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)

        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            return

        # Always open the file, but ignore failures in dry-run mode
        # in order to attempt to copy directly.
        # tokenize.open honors any PEP 263 coding cookie in the script.
        try:
            f = tokenize.open(script)
        except OSError:
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                return

            shebang_match = shebang_pattern.match(first_line)

        updated_files.append(outfile)
        if shebang_match:
            log.info("copying and adjusting %s -> %s", script, self.build_dir)
            if not self.dry_run:
                if not sysconfig.python_build:
                    executable = self.executable
                else:
                    # Building Python itself: point at the interpreter in
                    # the build tree rather than an installed one.
                    executable = os.path.join(
                        sysconfig.get_config_var("BINDIR"),
                        "python%s%s"
                        % (
                            sysconfig.get_config_var("VERSION"),
                            sysconfig.get_config_var("EXE"),
                        ),
                    )
                # Preserve any interpreter arguments from the original line.
                post_interp = shebang_match.group(1) or ''
                shebang = "#!" + executable + post_interp + "\n"
                self._validate_shebang(shebang, f.encoding)
                with open(outfile, "w", encoding=f.encoding) as outf:
                    outf.write(shebang)
                    outf.writelines(f.readlines())
            if f:
                f.close()
        else:
            if f:
                f.close()
            self.copy_file(script, outfile)

    def _change_modes(self, outfiles):
        """Make all copied scripts readable/executable (POSIX only)."""
        if os.name != 'posix':
            return

        for file in outfiles:
            self._change_mode(file)

    def _change_mode(self, file):
        """Ensure 'file' has read and execute bits for user, group, other."""
        if self.dry_run:
            log.info("changing mode of %s", file)
            return

        oldmode = os.stat(file)[ST_MODE] & 0o7777
        newmode = (oldmode | 0o555) & 0o7777
        if newmode != oldmode:
            log.info("changing mode of %s from %o to %o", file, oldmode, newmode)
            os.chmod(file, newmode)

    @staticmethod
    def _validate_shebang(shebang, encoding):
        """Raise ValueError if the shebang line cannot be written safely.

        The shebang must be encodable both to UTF-8 and to the script's
        own encoding.
        """
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be encodable to
        # UTF-8.
        try:
            shebang.encode('utf-8')
        except UnicodeEncodeError:
            raise ValueError(
                "The shebang ({!r}) is not encodable " "to utf-8".format(shebang)
            )

        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be encodable to
        # the script encoding too.
        try:
            shebang.encode(encoding)
        except UnicodeEncodeError:
            raise ValueError(
                "The shebang ({!r}) is not encodable "
                "to the script encoding ({})".format(shebang, encoding)
            )
 | 
			
		||||
@@ -0,0 +1,151 @@
 | 
			
		||||
"""distutils.command.check
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'check' command.
 | 
			
		||||
"""
 | 
			
		||||
import contextlib
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsSetupError
 | 
			
		||||
 | 
			
		||||
# docutils is an optional third-party dependency; when it is absent the
# reStructuredText check is simply unavailable, so the import failure is
# deliberately suppressed rather than propagated.
with contextlib.suppress(ImportError):
    import docutils.utils
    import docutils.parsers.rst
    import docutils.frontend
    import docutils.nodes

    class SilentReporter(docutils.utils.Reporter):
        """docutils Reporter that records system messages instead of
        emitting them, so the caller can inspect them afterwards."""

        def __init__(
            self,
            source,
            report_level,
            halt_level,
            stream=None,
            debug=0,
            encoding='ascii',
            error_handler='replace',
        ):
            # Collected (level, message, children, kwargs) tuples.
            self.messages = []
            super().__init__(
                source, report_level, halt_level, stream, debug, encoding, error_handler
            )

        def system_message(self, level, message, *children, **kwargs):
            # Record the message, then build the node exactly as the
            # base reporter would, without writing to the stream.
            self.messages.append((level, message, children, kwargs))
            return docutils.nodes.system_message(
                message, level=level, type=self.levels[level], *children, **kwargs
            )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class check(Command):
    """This command checks the meta-data of the package.

    It verifies that the required fields (name, version) are present and,
    optionally, that the long description parses as reStructuredText.
    """

    description = "perform some checks on the package"
    user_options = [
        ('metadata', 'm', 'Verify meta-data'),
        (
            'restructuredtext',
            'r',
            (
                'Checks if long string meta-data syntax '
                'are reStructuredText-compliant'
            ),
        ),
        ('strict', 's', 'Will exit with an error if a check fails'),
    ]

    boolean_options = ['metadata', 'restructuredtext', 'strict']

    def initialize_options(self):
        """Sets default values for options."""
        self.restructuredtext = 0
        self.metadata = 1
        self.strict = 0
        # Running warning count; strict mode fails when it is non-zero.
        self._warnings = 0

    def finalize_options(self):
        pass

    def warn(self, msg):
        """Counts the number of warnings that occur, then delegates to the
        base class so the message is still reported."""
        self._warnings += 1
        return Command.warn(self, msg)

    def run(self):
        """Runs the requested checks.

        In strict mode, a missing docutils (when --restructuredtext was
        requested) or any warning raises DistutilsSetupError.
        """
        # perform the various tests
        if self.metadata:
            self.check_metadata()
        if self.restructuredtext:
            # docutils is optional; the guarded module-level import leaves
            # the name in globals() only when it is actually available.
            if 'docutils' in globals():
                try:
                    self.check_restructuredtext()
                except TypeError as exc:
                    # Chain the original exception so API mismatches in
                    # docutils remain debuggable from the traceback.
                    raise DistutilsSetupError(str(exc)) from exc
            elif self.strict:
                raise DistutilsSetupError('The docutils package is needed.')

        # let's raise an error in strict mode, if we have at least
        # one warning
        if self.strict and self._warnings > 0:
            raise DistutilsSetupError('Please correct your package.')

    def check_metadata(self):
        """Ensures that all required elements of meta-data are supplied.

        Required fields:
            name, version

        Warns if any are missing.
        """
        metadata = self.distribution.metadata

        missing = [
            attr for attr in ('name', 'version') if not getattr(metadata, attr, None)
        ]

        if missing:
            self.warn("missing required meta-data: %s" % ', '.join(missing))

    def check_restructuredtext(self):
        """Checks if the long string fields are reST-compliant."""
        data = self.distribution.get_long_description()
        for warning in self._check_rst_data(data):
            # Each entry is (level, message, children, kwargs); the kwargs
            # may carry the source line of the problem.
            line = warning[-1].get('line')
            if line is None:
                warning = warning[1]
            else:
                warning = f'{warning[1]} (line {line})'
            self.warn(warning)

    def _check_rst_data(self, data):
        """Returns warnings when the provided data doesn't compile."""
        # the include and csv_table directives need this to be a path
        source_path = self.distribution.script_name or 'setup.py'
        parser = docutils.parsers.rst.Parser()
        settings = docutils.frontend.OptionParser(
            components=(docutils.parsers.rst.Parser,)
        ).get_default_values()
        settings.tab_width = 4
        settings.pep_references = None
        settings.rfc_references = None
        # SilentReporter accumulates messages instead of printing them.
        reporter = SilentReporter(
            source_path,
            settings.report_level,
            settings.halt_level,
            stream=settings.warning_stream,
            debug=settings.debug,
            encoding=settings.error_encoding,
            error_handler=settings.error_encoding_error_handler,
        )

        document = docutils.nodes.document(settings, reporter, source=source_path)
        document.note_source(source_path, -1)
        try:
            parser.parse(data, document)
        except AttributeError as e:
            # Surface parser-internal failures as a synthetic message
            # rather than letting them escape the check.
            reporter.messages.append(
                (-1, 'Could not finish the parsing: %s.' % e, '', {})
            )

        return reporter.messages
 | 
			
		||||
@@ -0,0 +1,76 @@
 | 
			
		||||
"""distutils.command.clean
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'clean' command."""
 | 
			
		||||
 | 
			
		||||
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..dir_util import remove_tree
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class clean(Command):
    """Delete the byproducts left behind by a previous 'build' run."""

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b', "base build directory (default: 'build.build-base')"),
        (
            'build-lib=',
            None,
            "build directory for all modules (default: 'build.build-lib')",
        ),
        ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
        (
            'build-scripts=',
            None,
            "build directory for scripts (default: 'build.build-scripts')",
        ),
        ('bdist-base=', None, "temporary directory for built distributions"),
        ('all', 'a', "remove all build output, not just temporary by-products"),
    ]

    boolean_options = ['all']

    def initialize_options(self):
        # Everything starts unset; finalize_options() inherits the real
        # values from the 'build' and 'bdist' commands.
        self.build_base = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.bdist_base = None
        self.all = None

    def finalize_options(self):
        # Mirror the directory layout chosen by 'build' and 'bdist'.
        self.set_undefined_options(
            'build',
            ('build_base', 'build_base'),
            ('build_lib', 'build_lib'),
            ('build_scripts', 'build_scripts'),
            ('build_temp', 'build_temp'),
        )
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))

    def run(self):
        # The build/temp.<plat> tree is always removed (when present).
        if not os.path.exists(self.build_temp):
            log.debug("'%s' does not exist -- can't clean it", self.build_temp)
        else:
            remove_tree(self.build_temp, dry_run=self.dry_run)

        if self.all:
            # With --all, the remaining build products go too.
            for directory in (self.build_lib, self.bdist_base, self.build_scripts):
                if not os.path.exists(directory):
                    log.warning("'%s' does not exist -- can't clean it", directory)
                else:
                    remove_tree(directory, dry_run=self.dry_run)

        # Best-effort removal of the base build directory itself: it may
        # be empty now, and if it isn't we simply leave it alone.
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                log.info("removing '%s'", self.build_base)
            except OSError:
                pass
 | 
			
		||||
@@ -0,0 +1,377 @@
 | 
			
		||||
"""distutils.command.config
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'config' command, a (mostly) empty command class
 | 
			
		||||
that exists mainly to be sub-classed by specific module distributions and
 | 
			
		||||
applications.  The idea is that while every "config" command is different,
 | 
			
		||||
at least they're all named the same, and users always see "config" in the
 | 
			
		||||
list of standard commands.  Also, this is a good place to put common
 | 
			
		||||
configure-like tasks: "try to compile this C code", or "figure out where
 | 
			
		||||
this header file lives".
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsExecError
 | 
			
		||||
from ..sysconfig import customize_compiler
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
# Maps a supported source language to the file extension used for the
# generated "_configtest" source file (see config._gen_temp_sourcefile).
LANG_EXT = {"c": ".c", "c++": ".cxx"}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class config(Command):
    """Mostly-empty 'config' command intended to be subclassed.

    Every "config" command is different, but they all share the name and
    this toolbox: low-level build helpers (_preprocess, _compile, _link)
    and Autoconf-style probes (try_cpp, search_cpp, try_compile, try_link,
    try_run, check_func, check_lib, check_header) that concrete config
    commands can combine into real configuration checks.
    """

    description = "prepare to build"

    user_options = [
        ('compiler=', None, "specify the compiler type"),
        ('cc=', None, "specify the compiler executable"),
        ('include-dirs=', 'I', "list of directories to search for header files"),
        ('define=', 'D', "C preprocessor macros to define"),
        ('undef=', 'U', "C preprocessor macros to undefine"),
        ('libraries=', 'l', "external C libraries to link with"),
        ('library-dirs=', 'L', "directories to search for external C libraries"),
        ('noisy', None, "show every action (compile, link, run, ...) taken"),
        (
            'dump-source',
            None,
            "dump generated source files before attempting to compile them",
        ),
    ]

    # The three standard command methods: since the "config" command
    # does nothing by default, these are empty.

    def initialize_options(self):
        self.compiler = None
        self.cc = None
        self.include_dirs = None
        self.libraries = None
        self.library_dirs = None

        # maximal output for now
        self.noisy = 1
        self.dump_source = 1

        # list of temporary files generated along-the-way that we have
        # to clean at some point
        self.temp_files = []

    def finalize_options(self):
        # Each search-path option may arrive as None, an os.pathsep-joined
        # string, or a list; normalize everything to a list.
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        elif isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        if self.libraries is None:
            self.libraries = []
        elif isinstance(self.libraries, str):
            self.libraries = [self.libraries]

        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

    def run(self):
        pass

    # Utility methods for actual "config" commands.  The interfaces are
    # loosely based on Autoconf macros of similar names.  Sub-classes
    # may use these freely.

    def _check_compiler(self):
        """Check that 'self.compiler' really is a CCompiler object;
        if not, make it one.
        """
        # We do this late, and only on-demand, because this is an expensive
        # import.
        from ..ccompiler import CCompiler, new_compiler

        if not isinstance(self.compiler, CCompiler):
            self.compiler = new_compiler(
                compiler=self.compiler, dry_run=self.dry_run, force=1
            )
            customize_compiler(self.compiler)
            if self.include_dirs:
                self.compiler.set_include_dirs(self.include_dirs)
            if self.libraries:
                self.compiler.set_libraries(self.libraries)
            if self.library_dirs:
                self.compiler.set_library_dirs(self.library_dirs)

    def _gen_temp_sourcefile(self, body, headers, lang):
        """Write '#include' lines for 'headers' followed by 'body' to a
        temporary source file and return its name.
        """
        filename = "_configtest" + LANG_EXT[lang]
        with open(filename, "w") as file:
            if headers:
                for header in headers:
                    file.write("#include <%s>\n" % header)
                file.write("\n")
            file.write(body)
            # endswith() (rather than indexing body[-1]) keeps an empty
            # 'body' from raising IndexError.
            if not body.endswith("\n"):
                file.write("\n")
        return filename

    def _preprocess(self, body, headers, include_dirs, lang):
        """Generate a source file and run it through the preprocessor.
        Returns the (source, output) filenames, registered for cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        out = "_configtest.i"
        self.temp_files.extend([src, out])
        self.compiler.preprocess(src, out, include_dirs=include_dirs)
        return (src, out)

    def _compile(self, body, headers, include_dirs, lang):
        """Generate a source file and compile it.  Returns the
        (source, object) filenames, registered for cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        if self.dump_source:
            dump_file(src, "compiling '%s':" % src)
        (obj,) = self.compiler.object_filenames([src])
        self.temp_files.extend([src, obj])
        self.compiler.compile([src], include_dirs=include_dirs)
        return (src, obj)

    def _link(self, body, headers, include_dirs, libraries, library_dirs, lang):
        """Compile a source file and link it into an executable.  Returns
        the (source, object, program) filenames, registered for cleanup.
        """
        (src, obj) = self._compile(body, headers, include_dirs, lang)
        prog = os.path.splitext(os.path.basename(src))[0]
        self.compiler.link_executable(
            [obj],
            prog,
            libraries=libraries,
            library_dirs=library_dirs,
            target_lang=lang,
        )

        if self.compiler.exe_extension is not None:
            prog = prog + self.compiler.exe_extension
        self.temp_files.append(prog)

        return (src, obj, prog)

    def _clean(self, *filenames):
        """Remove the given files, or every registered temp file when
        called with no arguments.  Missing files are silently ignored.
        """
        if not filenames:
            filenames = self.temp_files
            self.temp_files = []
        log.info("removing: %s", ' '.join(filenames))
        for filename in filenames:
            try:
                os.remove(filename)
            except OSError:
                pass

    # XXX these ignore the dry-run flag: what to do, what to do? even if
    # you want a dry-run build, you still need some sort of configuration
    # info.  My inclination is to make it up to the real config command to
    # consult 'dry_run', and assume a default (minimal) configuration if
    # true.  The problem with trying to do it here is that you'd have to
    # return either true or false from all the 'try' methods, neither of
    # which is correct.

    # XXX need access to the header search path and maybe default macros.

    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file from 'body' (a string containing lines
        of C/C++ code) and 'headers' (a list of header files to include)
        and run it through the preprocessor.  Return true if the
        preprocessor succeeded, false if there were any errors.
        ('body' probably isn't of much use, but what the heck.)
        """
        from ..ccompiler import CompileError

        self._check_compiler()
        ok = True
        try:
            self._preprocess(body, headers, include_dirs, lang)
        except CompileError:
            ok = False

        self._clean()
        return ok

    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file (just like 'try_cpp()'), run it through
        the preprocessor, and return true if any line of the output matches
        'pattern'.  'pattern' should either be a compiled regex object or a
        string containing a regex.  If both 'body' and 'headers' are None,
        preprocesses an empty file -- which can be useful to determine the
        symbols the preprocessor and compiler set by default.
        """
        self._check_compiler()
        src, out = self._preprocess(body, headers, include_dirs, lang)

        if isinstance(pattern, str):
            pattern = re.compile(pattern)

        match = False
        with open(out) as file:
            # Iterate the file directly instead of a manual readline() loop.
            for line in file:
                if pattern.search(line):
                    match = True
                    break

        self._clean()
        return match

    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
        """Try to compile a source file built from 'body' and 'headers'.
        Return true on success, false otherwise.
        """
        from ..ccompiler import CompileError

        self._check_compiler()
        try:
            self._compile(body, headers, include_dirs, lang)
            ok = True
        except CompileError:
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_link(
        self,
        body,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        lang="c",
    ):
        """Try to compile and link a source file, built from 'body' and
        'headers', to executable form.  Return true on success, false
        otherwise.
        """
        from ..ccompiler import CompileError, LinkError

        self._check_compiler()
        try:
            self._link(body, headers, include_dirs, libraries, library_dirs, lang)
            ok = True
        except (CompileError, LinkError):
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_run(
        self,
        body,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        lang="c",
    ):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'.  Return true on success, false
        otherwise.
        """
        from ..ccompiler import CompileError, LinkError

        self._check_compiler()
        try:
            src, obj, exe = self._link(
                body, headers, include_dirs, libraries, library_dirs, lang
            )
            self.spawn([exe])
            ok = True
        except (CompileError, LinkError, DistutilsExecError):
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    # -- High-level methods --------------------------------------------
    # (these are the ones that are actually likely to be useful
    # when implementing a real-world config command!)

    def check_func(
        self,
        func,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        decl=0,
        call=0,
    ):
        """Determine if function 'func' is available by constructing a
        source file that refers to 'func', and compiles and links it.
        If everything succeeds, returns true; otherwise returns false.

        The constructed source file starts out by including the header
        files listed in 'headers'.  If 'decl' is true, it then declares
        'func' (as "int func()"); you probably shouldn't supply 'headers'
        and set 'decl' true in the same call, or you might get errors about
        a conflicting declarations for 'func'.  Finally, the constructed
        'main()' function either references 'func' or (if 'call' is true)
        calls it.  'libraries' and 'library_dirs' are used when
        linking.
        """
        self._check_compiler()
        body = []
        if decl:
            body.append("int %s ();" % func)
        body.append("int main () {")
        if call:
            body.append("  %s();" % func)
        else:
            body.append("  %s;" % func)
        body.append("}")
        body = "\n".join(body) + "\n"

        return self.try_link(body, headers, include_dirs, libraries, library_dirs)

    def check_lib(
        self,
        library,
        library_dirs=None,
        headers=None,
        include_dirs=None,
        other_libraries=[],
    ):
        """Determine if 'library' is available to be linked against,
        without actually checking that any particular symbols are provided
        by it.  'headers' will be used in constructing the source file to
        be compiled, but the only effect of this is to check if all the
        header files listed are available.  Any libraries listed in
        'other_libraries' will be included in the link, in case 'library'
        has symbols that depend on other libraries.
        """
        self._check_compiler()
        return self.try_link(
            "int main (void) { }",
            headers,
            include_dirs,
            [library] + other_libraries,
            library_dirs,
        )

    def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"):
        """Determine if the system header file named by 'header' exists
        and can be found by the preprocessor; return true if so, false
        otherwise.

        ('library_dirs' and 'lang' are accepted for signature
        compatibility but are currently unused.)
        """
        return self.try_cpp(
            body="/* No body */", headers=[header], include_dirs=include_dirs
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def dump_file(filename, head=None):
    """Dumps a file content into log.info.

    If head is not None, will be dumped before the file content.
    """
    if head is None:
        log.info('%s', filename)
    else:
        log.info(head)
    # A context manager guarantees the file is closed even if log.info
    # raises, replacing the manual open/try/finally dance.
    with open(filename) as file:
        log.info(file.read())
 | 
			
		||||
@@ -0,0 +1,814 @@
 | 
			
		||||
"""distutils.command.install
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install' command."""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
import contextlib
 | 
			
		||||
import sysconfig
 | 
			
		||||
import itertools
 | 
			
		||||
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..debug import DEBUG
 | 
			
		||||
from ..sysconfig import get_config_vars
 | 
			
		||||
from ..file_util import write_file
 | 
			
		||||
from ..util import convert_path, subst_vars, change_root
 | 
			
		||||
from ..util import get_platform
 | 
			
		||||
from ..errors import DistutilsOptionError, DistutilsPlatformError
 | 
			
		||||
from . import _framework_compat as fw
 | 
			
		||||
from .. import _collections
 | 
			
		||||
 | 
			
		||||
from site import USER_BASE
 | 
			
		||||
from site import USER_SITE
 | 
			
		||||
 | 
			
		||||
# distutils could historically be built without user-site support; in this
# vendored copy the feature is unconditionally enabled.
HAS_USER_SITE = True

# Install layout on Windows: everything lives directly under {base}.
WINDOWS_SCHEME = {
    'purelib': '{base}/Lib/site-packages',
    'platlib': '{base}/Lib/site-packages',
    'headers': '{base}/Include/{dist_name}',
    'scripts': '{base}/Scripts',
    'data': '{base}',
}

# Named install layouts, keyed by scheme name.  Values are brace-style
# templates with placeholders such as {base}, {platbase}, {py_version_short}
# and {dist_name} — presumably substituted later by the install command's
# path-expansion machinery (not visible here; confirm against the rest of
# this module).
INSTALL_SCHEMES = {
    'posix_prefix': {
        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
        'platlib': '{platbase}/{platlibdir}/{implementation_lower}'
        '{py_version_short}/site-packages',
        'headers': '{base}/include/{implementation_lower}'
        '{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'posix_home': {
        'purelib': '{base}/lib/{implementation_lower}',
        'platlib': '{base}/{platlibdir}/{implementation_lower}',
        'headers': '{base}/include/{implementation_lower}/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'nt': WINDOWS_SCHEME,
    'pypy': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'pypy_nt': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/Scripts',
        'data': '{base}',
    },
}

# user site schemes (install into the per-user base/site directories)
if HAS_USER_SITE:
    INSTALL_SCHEMES['nt_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers': '{userbase}/{implementation}{py_version_nodot_plat}'
        '/Include/{dist_name}',
        'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts',
        'data': '{userbase}',
    }

    INSTALL_SCHEMES['posix_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers': '{userbase}/include/{implementation_lower}'
        '{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{userbase}/bin',
        'data': '{userbase}',
    }


# Merge in schemes supplied by the _framework_compat shim (imported above
# as 'fw').
INSTALL_SCHEMES.update(fw.schemes)


# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _load_sysconfig_schemes():
 | 
			
		||||
    with contextlib.suppress(AttributeError):
 | 
			
		||||
        return {
 | 
			
		||||
            scheme: sysconfig.get_paths(scheme, expand=False)
 | 
			
		||||
            for scheme in sysconfig.get_scheme_names()
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _load_schemes():
    """Merge the static INSTALL_SCHEMES with the schemes from sysconfig.

    For a scheme present in both sources, sysconfig's entries override the
    static defaults key-by-key.
    """
    dynamic = _load_sysconfig_schemes() or {}
    all_names = set(INSTALL_SCHEMES) | set(dynamic)
    merged = {}
    for name in all_names:
        entry = dict(INSTALL_SCHEMES.get(name, {}))
        entry.update(dynamic.get(name, {}))
        merged[name] = entry
    return merged
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_implementation():
 | 
			
		||||
    if hasattr(sys, 'pypy_version_info'):
 | 
			
		||||
        return 'PyPy'
 | 
			
		||||
    else:
 | 
			
		||||
        return 'Python'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _select_scheme(ob, name):
    """Resolve the named scheme and copy its paths onto *ob*.

    Only attributes that are currently ``None`` on *ob* are filled in, so
    user-supplied directories are never overwritten.
    """
    resolved = _resolve_scheme(name)
    scheme = _inject_headers(name, _load_scheme(resolved))
    attrs = _scheme_attrs(scheme)
    vars(ob).update(_remove_set(ob, attrs))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _remove_set(ob, attrs):
 | 
			
		||||
    """
 | 
			
		||||
    Include only attrs that are None in ob.
 | 
			
		||||
    """
 | 
			
		||||
    return {key: value for key, value in attrs.items() if getattr(ob, key) is None}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _resolve_scheme(name):
 | 
			
		||||
    os_name, sep, key = name.partition('_')
 | 
			
		||||
    try:
 | 
			
		||||
        resolved = sysconfig.get_preferred_scheme(key)
 | 
			
		||||
    except Exception:
 | 
			
		||||
        resolved = fw.scheme(_pypy_hack(name))
 | 
			
		||||
    return resolved
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _load_scheme(name):
    """Look up a single named scheme in the merged scheme table."""
    schemes = _load_schemes()
    return schemes[name]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _inject_headers(name, scheme):
    """Ensure *scheme* carries a ``'headers'`` entry.

    The scheme preferred by sysconfig may omit headers; borrow the value
    from the classic (fallback) scheme for the same name. pypa/distutils#88
    """
    # Always resolve the fallback eagerly, matching the original's
    # behavior of computing it (and its 'headers' entry) unconditionally.
    fallback_scheme = _load_scheme(_pypy_hack(name))
    fallback_headers = fallback_scheme['headers']
    if 'headers' not in scheme:
        scheme['headers'] = fallback_headers
    return scheme
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _scheme_attrs(scheme):
    """Map each scheme key to its ``install_*`` attribute name and value."""
    attrs = {}
    for key in SCHEME_KEYS:
        attrs['install_' + key] = scheme[key]
    return attrs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _pypy_hack(name):
 | 
			
		||||
    PY37 = sys.version_info < (3, 8)
 | 
			
		||||
    old_pypy = hasattr(sys, 'pypy_version_info') and PY37
 | 
			
		||||
    prefix = not name.endswith(('_user', '_home'))
 | 
			
		||||
    pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
 | 
			
		||||
    return pypy_name if old_pypy and prefix else name
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class install(Command):
    """Distutils 'install' command: install everything from the build
    directory into the chosen installation scheme's directories."""

    description = "install everything from build directory"

    # (long-option, short-option, help-text) triples consumed by the
    # distutils option machinery.
    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None, "installation prefix"),
        ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"),
        ('home=', None, "(Unix only) home directory to install under"),
        # Or, just set the base director(y|ies)
        (
            'install-base=',
            None,
            "base installation directory (instead of --prefix or --home)",
        ),
        (
            'install-platbase=',
            None,
            "base installation directory for platform-specific files "
            + "(instead of --exec-prefix or --home)",
        ),
        ('root=', None, "install everything relative to this alternate root directory"),
        # Or, explicitly set the installation scheme
        (
            'install-purelib=',
            None,
            "installation directory for pure Python module distributions",
        ),
        (
            'install-platlib=',
            None,
            "installation directory for non-pure module distributions",
        ),
        (
            'install-lib=',
            None,
            "installation directory for all module distributions "
            + "(overrides --install-purelib and --install-platlib)",
        ),
        ('install-headers=', None, "installation directory for C/C++ headers"),
        ('install-scripts=', None, "installation directory for Python scripts"),
        ('install-data=', None, "installation directory for data files"),
        # Byte-compilation options -- see install_lib.py for details, as
        # these are duplicated from there (but only install_lib does
        # anything with them).
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        # Miscellaneous control options
        ('force', 'f', "force installation (overwrite any existing files)"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        # Where to install documentation (eventually!)
        # ('doc-format=', None, "format of documentation to generate"),
        # ('install-man=', None, "directory for Unix man pages"),
        # ('install-html=', None, "directory for HTML documentation"),
        # ('install-info=', None, "directory for GNU info files"),
        ('record=', None, "filename in which to record list of installed files"),
    ]

    # Options that take no argument and toggle a flag.
    boolean_options = ['compile', 'force', 'skip-build']

    # PEP 370 --user support is only offered when a user site exists.
    if HAS_USER_SITE:
        user_options.append(
            ('user', None, "install in user site-package '%s'" % USER_SITE)
        )
        boolean_options.append('user')

    # --no-compile is the negation of --compile.
    negative_opt = {'no-compile': 'compile'}
 | 
			
		||||
 | 
			
		||||
    def initialize_options(self):
        """Set every option to its "not yet decided" default value."""
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = self.exec_prefix = self.home = None
        self.user = 0

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = self.install_platbase = self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None  # for pure module distributions
        self.install_platlib = None  # non-pure (dists w/ extensions)
        self.install_headers = None  # for C/C++ headers
        self.install_lib = None  # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        self.install_userbase = USER_BASE
        self.install_usersite = USER_SITE

        # Byte-compilation knobs (consumed by install_lib).
        self.compile = None
        self.optimize = None

        # Deprecated
        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file.  (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!)  Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = 1

        # 'force' forces installation, even if target files are not
        # out-of-date.  'skip_build' skips running the "build" command,
        # handy if you know it's not necessary.  'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = 0
        self.skip_build = 0
        self.warn_dir = 1

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work.  They are not user
        # options, because if the user told the install command where the
        # build directory is, that wouldn't affect the build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        # self.install_man = None
        # self.install_html = None
        # self.install_info = None

        self.record = None
 | 
			
		||||
 | 
			
		||||
    # -- Option finalizing methods -------------------------------------
 | 
			
		||||
    # (This is rather more involved than for most commands,
 | 
			
		||||
    # because this is where the policy for installing third-
 | 
			
		||||
    # party Python modules on various platforms given a wide
 | 
			
		||||
    # array of user input is decided.  Yes, it's quite complex!)
 | 
			
		||||
 | 
			
		||||
    def finalize_options(self):  # noqa: C901
        """Finalizes options.

        Validates option combinations, delegates platform-specific
        directory policy to finalize_unix()/finalize_other(), builds the
        config-var substitution stack, expands every install_* directory,
        and handles the deprecated 'extra_path' and --root options.

        Raises DistutilsOptionError on inconsistent option combinations.
        """
        # This method (and its helpers, like 'finalize_unix()',
        # 'finalize_other()', and 'select_scheme()') is where the default
        # installation directories for modules, extension modules, and
        # anything else we care to install from a Python module
        # distribution.  Thus, this code makes a pretty important policy
        # statement about how third-party stuff is added to a Python
        # installation!  Note that the actual work of installation is done
        # by the relatively simple 'install_*' commands; they just take
        # their orders from the installation directory options determined
        # here.

        # Check for errors/inconsistencies in the options; first, stuff
        # that's wrong on any platform.

        if (self.prefix or self.exec_prefix or self.home) and (
            self.install_base or self.install_platbase
        ):
            raise DistutilsOptionError(
                "must supply either prefix/exec-prefix/home or "
                + "install-base/install-platbase -- not both"
            )

        if self.home and (self.prefix or self.exec_prefix):
            raise DistutilsOptionError(
                "must supply either home or prefix/exec-prefix -- not both"
            )

        if self.user and (
            self.prefix
            or self.exec_prefix
            or self.home
            or self.install_base
            or self.install_platbase
        ):
            raise DistutilsOptionError(
                "can't combine user with prefix, "
                "exec_prefix/home, or install_(plat)base"
            )

        # Next, stuff that's wrong (or dubious) only on certain platforms.
        if os.name != "posix":
            if self.exec_prefix:
                self.warn("exec-prefix option ignored on this platform")
                self.exec_prefix = None

        # Now the interesting logic -- so interesting that we farm it out
        # to other methods.  The goal of these methods is to set the final
        # values for the install_{lib,scripts,data,...}  options, using as
        # input a heady brew of prefix, exec_prefix, home, install_base,
        # install_platbase, user-supplied versions of
        # install_{purelib,platlib,lib,scripts,data,...}, and the
        # install schemes.  Phew!

        self.dump_dirs("pre-finalize_{unix,other}")

        if os.name == 'posix':
            self.finalize_unix()
        else:
            self.finalize_other()

        self.dump_dirs("post-finalize_{unix,other}()")

        # Expand configuration variables, tilde, etc. in self.install_base
        # and self.install_platbase -- that way, we can use $base or
        # $platbase in the other installation directories and not worry
        # about needing recursive variable expansion (shudder).

        py_version = sys.version.split()[0]
        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
        try:
            abiflags = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            abiflags = ''
        # Substitution variables available to the scheme templates above.
        local_vars = {
            'dist_name': self.distribution.get_name(),
            'dist_version': self.distribution.get_version(),
            'dist_fullname': self.distribution.get_fullname(),
            'py_version': py_version,
            'py_version_short': '%d.%d' % sys.version_info[:2],
            'py_version_nodot': '%d%d' % sys.version_info[:2],
            'sys_prefix': prefix,
            'prefix': prefix,
            'sys_exec_prefix': exec_prefix,
            'exec_prefix': exec_prefix,
            'abiflags': abiflags,
            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
            'implementation_lower': _get_implementation().lower(),
            'implementation': _get_implementation(),
        }

        # vars for compatibility on older Pythons
        compat_vars = dict(
            # Python 3.9 and earlier
            py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''),
        )

        if HAS_USER_SITE:
            local_vars['userbase'] = self.install_userbase
            local_vars['usersite'] = self.install_usersite

        # Layered lookup: later entries in the stack take precedence per
        # DictStack semantics; local_vars is mutated below, which the
        # stack view reflects.
        self.config_vars = _collections.DictStack(
            [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars]
        )

        self.expand_basedirs()

        self.dump_dirs("post-expand_basedirs()")

        # Now define config vars for the base directories so we can expand
        # everything else.
        local_vars['base'] = self.install_base
        local_vars['platbase'] = self.install_platbase

        if DEBUG:
            from pprint import pprint

            print("config vars:")
            pprint(dict(self.config_vars))

        # Expand "~" and configuration variables in the installation
        # directories.
        self.expand_dirs()

        self.dump_dirs("post-expand_dirs()")

        # Create directories in the home dir:
        if self.user:
            self.create_home_path()

        # Pick the actual directory to install all modules to: either
        # install_purelib or install_platlib, depending on whether this
        # module distribution is pure or not.  Of course, if the user
        # already specified install_lib, use their selection.
        if self.install_lib is None:
            if self.distribution.has_ext_modules():  # has extensions: non-pure
                self.install_lib = self.install_platlib
            else:
                self.install_lib = self.install_purelib

        # Convert directories from Unix /-separated syntax to the local
        # convention.
        self.convert_paths(
            'lib',
            'purelib',
            'platlib',
            'scripts',
            'data',
            'headers',
            'userbase',
            'usersite',
        )

        # Deprecated
        # Well, we're not actually fully completely finalized yet: we still
        # have to deal with 'extra_path', which is the hack for allowing
        # non-packagized module distributions (hello, Numerical Python!) to
        # get their own directories.
        self.handle_extra_path()
        self.install_libbase = self.install_lib  # needed for .pth file
        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

        # If a new root directory was supplied, make all the installation
        # dirs relative to it.
        if self.root is not None:
            self.change_roots(
                'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers'
            )

        self.dump_dirs("after prepending root")

        # Find out the build directories, ie. where to install from.
        self.set_undefined_options(
            'build', ('build_base', 'build_base'), ('build_lib', 'build_lib')
        )

        # Punt on doc directories for now -- after all, we're punting on
        # documentation completely!
			
		||||
 | 
			
		||||
    def dump_dirs(self, msg):
        """Log the current value of every user option (DEBUG mode only)."""
        if not DEBUG:
            return
        from ..fancy_getopt import longopt_xlate

        log.debug(msg + ":")
        for option in self.user_options:
            name = option[0]
            if name.endswith("="):
                name = name[:-1]
            # Negative options report the inverse of the option they negate.
            negated = name in self.negative_opt
            if negated:
                name = self.negative_opt[name]
            name = name.translate(longopt_xlate)
            value = getattr(self, name)
            if negated:
                value = not value
            log.debug("  %s: %s", name, value)
 | 
			
		||||
 | 
			
		||||
    def finalize_unix(self):
        """Finalizes options for posix platforms.

        Chooses the installation base/platbase and scheme from (in order
        of precedence): explicit install-base/platbase, --user, --home,
        or --prefix/--exec-prefix (defaulting to the interpreter's own).

        Raises DistutilsOptionError for inconsistent combinations and
        DistutilsPlatformError when --user has no user base directory.
        """
        if self.install_base is not None or self.install_platbase is not None:
            # User supplied explicit bases: they must also supply a full
            # scheme; we never mix explicit bases with a computed scheme.
            incomplete_scheme = (
                (
                    self.install_lib is None
                    and self.install_purelib is None
                    and self.install_platlib is None
                )
                or self.install_headers is None
                or self.install_scripts is None
                or self.install_data is None
            )
            if incomplete_scheme:
                raise DistutilsOptionError(
                    "install-base or install-platbase supplied, but "
                    "installation scheme is incomplete"
                )
            return

        if self.user:
            if self.install_userbase is None:
                raise DistutilsPlatformError("User base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme("posix_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                if self.exec_prefix is not None:
                    raise DistutilsOptionError(
                        "must not supply exec-prefix without prefix"
                    )

                # Allow Fedora to add components to the prefix
                _prefix_addition = getattr(sysconfig, '_prefix_addition', "")

                self.prefix = os.path.normpath(sys.prefix) + _prefix_addition
                self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition

            else:
                if self.exec_prefix is None:
                    self.exec_prefix = self.prefix

            self.install_base = self.prefix
            self.install_platbase = self.exec_prefix
            self.select_scheme("posix_prefix")
 | 
			
		||||
 | 
			
		||||
    def finalize_other(self):
        """Finalize installation directories for non-posix platforms."""
        if self.user:
            # --user: install under the per-user base directory.
            if self.install_userbase is None:
                raise DistutilsPlatformError("User base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme(os.name + "_user")
            return

        if self.home is not None:
            # --home: everything lives under the given home directory.
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
            return

        # Default: install relative to the interpreter's own prefix.
        if self.prefix is None:
            self.prefix = os.path.normpath(sys.prefix)
        self.install_base = self.install_platbase = self.prefix
        try:
            self.select_scheme(os.name)
        except KeyError:
            raise DistutilsPlatformError(
                "I don't know how to install stuff on '%s'" % os.name
            )
 | 
			
		||||
 | 
			
		||||
    def select_scheme(self, name):
        """Fill this command's unset install_* attributes from the named
        installation scheme."""
        _select_scheme(self, name)
 | 
			
		||||
 | 
			
		||||
    def _expand_attrs(self, attrs):
        """Expand "~" and $config-vars in each named attribute, in place.

        Attributes that are still None are left untouched.
        """
        for name in attrs:
            value = getattr(self, name)
            if value is None:
                continue
            if os.name in ('posix', 'nt'):
                value = os.path.expanduser(value)
            setattr(self, name, subst_vars(value, self.config_vars))
 | 
			
		||||
 | 
			
		||||
    def expand_basedirs(self):
        """Expand "~" and config vars in install_base, install_platbase,
        and root."""
        self._expand_attrs(['install_base', 'install_platbase', 'root'])
 | 
			
		||||
 | 
			
		||||
    def expand_dirs(self):
        """Expand "~" and config vars in every install_* directory option."""
        directory_attrs = [
            'install_purelib',
            'install_platlib',
            'install_lib',
            'install_headers',
            'install_scripts',
            'install_data',
        ]
        self._expand_attrs(directory_attrs)
 | 
			
		||||
 | 
			
		||||
    def convert_paths(self, *names):
        """Convert each named install_* path from Unix notation to the
        local platform's convention."""
        for suffix in names:
            attr = f"install_{suffix}"
            current = getattr(self, attr)
            setattr(self, attr, convert_path(current))
 | 
			
		||||
 | 
			
		||||
    def handle_extra_path(self):
        """Derive `path_file` and `extra_dirs` from the deprecated
        `extra_path` option (command option first, then distribution)."""
        if self.extra_path is None:
            self.extra_path = self.distribution.extra_path

        if self.extra_path is None:
            # No extra path requested: no .pth file, no extra directory.
            # XXX should we warn if path_file and not extra_dirs? (in which
            # case the path file would be harmless but pointless)
            self.path_file = None
            self.extra_dirs = ''
            return

        log.warning(
            "Distribution option extra_path is deprecated. "
            "See issue27919 for details."
        )
        if isinstance(self.extra_path, str):
            self.extra_path = self.extra_path.split(',')

        if len(self.extra_path) == 1:
            # Single element: same name for the .pth file and the subdir.
            path_file = extra_dirs = self.extra_path[0]
        elif len(self.extra_path) == 2:
            path_file, extra_dirs = self.extra_path
        else:
            raise DistutilsOptionError(
                "'extra_path' option must be a list, tuple, or "
                "comma-separated string with 1 or 2 elements"
            )

        self.path_file = path_file
        # convert to local form in case Unix notation used (as it
        # should be in setup scripts)
        self.extra_dirs = convert_path(extra_dirs)
 | 
			
		||||
 | 
			
		||||
    def change_roots(self, *names):
        """Re-root each named install_* directory under self.root."""
        for suffix in names:
            attr = f"install_{suffix}"
            rerooted = change_root(self.root, getattr(self, attr))
            setattr(self, attr, rerooted)
 | 
			
		||||
 | 
			
		||||
    def create_home_path(self):
        """Create any missing config-var directories that live under the
        user's home directory (only relevant for --user installs)."""
        if not self.user:
            return
        home = convert_path(os.path.expanduser("~"))
        for _name, path in self.config_vars.items():
            inside_home = str(path).startswith(home)
            if inside_home and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0o700)" % path)
                os.makedirs(path, 0o700)
 | 
			
		||||
 | 
			
		||||
    # -- Command execution methods -------------------------------------
 | 
			
		||||
 | 
			
		||||
    def run(self):
        """Runs the command.

        Builds first (unless --skip-build), runs every needed sub-command,
        writes the .pth file and the --record file when requested, and
        finally warns if the install target is not on sys.path.
        """
        # Obviously have to build before we can install
        if not self.skip_build:
            self.run_command('build')
            # If we built for any other platform, we can't install.
            build_plat = self.distribution.get_command_obj('build').plat_name
            # check warn_dir - it is a clue that the 'install' is happening
            # internally, and not to sys.path, so we don't check the platform
            # matches what we are running.
            if self.warn_dir and build_plat != get_platform():
                raise DistutilsPlatformError("Can't install when " "cross-compiling")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.path_file:
            self.create_path_file()

        # write list of installed files, if requested.
        if self.record:
            outputs = self.get_outputs()
            if self.root:  # strip any package prefix
                root_len = len(self.root)
                for counter in range(len(outputs)):
                    outputs[counter] = outputs[counter][root_len:]
            self.execute(
                write_file,
                (self.record, outputs),
                "writing list of installed files to '%s'" % self.record,
            )

        # Normalize separators and case on both sides so the membership
        # test below is meaningful on case-insensitive filesystems.
        sys_path = map(os.path.normpath, sys.path)
        sys_path = map(os.path.normcase, sys_path)
        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
        if (
            self.warn_dir
            and not (self.path_file and self.install_path_file)
            and install_lib not in sys_path
        ):
            log.debug(
                (
                    "modules installed to '%s', which is not in "
                    "Python's module search path (sys.path) -- "
                    "you'll have to change the search path yourself"
                ),
                self.install_lib,
            )
 | 
			
		||||
 | 
			
		||||
    def create_path_file(self):
        """Write the ``.pth`` file, or warn when path files are disallowed."""
        pth_name = self.path_file + ".pth"
        filename = os.path.join(self.install_libbase, pth_name)
        if not self.install_path_file:
            self.warn("path file '%s' not created" % filename)
            return
        self.execute(
            write_file, (filename, [self.extra_dirs]), "creating %s" % filename
        )
 | 
			
		||||
 | 
			
		||||
    # -- Reporting methods ---------------------------------------------
 | 
			
		||||
 | 
			
		||||
    def get_outputs(self):
        """Collect the installed-file lists of every sub-command.

        Duplicate entries are dropped while preserving first-seen order;
        the generated ``.pth`` file is appended when one is being installed.
        """
        outputs = []
        for sub_name in self.get_sub_commands():
            cmd_obj = self.get_finalized_command(sub_name)
            for produced in cmd_obj.get_outputs():
                if produced not in outputs:
                    outputs.append(produced)

        if self.path_file and self.install_path_file:
            outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth"))

        return outputs
 | 
			
		||||
 | 
			
		||||
    def get_inputs(self):
        """Concatenate the input-file lists of every sub-command."""
        # XXX gee, this looks familiar ;-(
        return [
            infile
            for sub_name in self.get_sub_commands()
            for infile in self.get_finalized_command(sub_name).get_inputs()
        ]
 | 
			
		||||
 | 
			
		||||
    # -- Predicates for sub-command list -------------------------------
 | 
			
		||||
 | 
			
		||||
    def has_lib(self):
        """True if the distribution has any Python modules to install,
        whether pure-Python or extension modules."""
        dist = self.distribution
        return dist.has_pure_modules() or dist.has_ext_modules()
 | 
			
		||||
 | 
			
		||||
    def has_headers(self):
        """True if the distribution declares any C/C++ headers to install."""
        dist = self.distribution
        return dist.has_headers()
 | 
			
		||||
 | 
			
		||||
    def has_scripts(self):
        """True if the distribution declares any scripts to install."""
        dist = self.distribution
        return dist.has_scripts()
 | 
			
		||||
 | 
			
		||||
    def has_data(self):
        """True if the distribution declares any data files to install."""
        dist = self.distribution
        return dist.has_data_files()
 | 
			
		||||
 | 
			
		||||
    # 'sub_commands': a list of commands this command might have to run to
    # get its work done.  See cmd.py for more info.
    sub_commands = [
        # (sub-command name, predicate deciding whether it needs to run)
        ('install_lib', has_lib),
        ('install_headers', has_headers),
        ('install_scripts', has_scripts),
        ('install_data', has_data),
        # egg-info metadata is written unconditionally
        ('install_egg_info', lambda self: True),
    ]
 | 
			
		||||
@@ -0,0 +1,84 @@
 | 
			
		||||
"""distutils.command.install_data
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install_data' command, for installing
 | 
			
		||||
platform-independent data files."""
 | 
			
		||||
 | 
			
		||||
# contributed by Bastian Kleineidam
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..util import change_root, convert_path
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class install_data(Command):
    """Install platform-independent data files declared in ``data_files``.

    Each entry is either a bare filename (copied into the base install
    directory) or a ``(target_dir, [files])`` tuple.
    """

    description = "install data files"

    user_options = [
        (
            'install-dir=',
            'd',
            "base directory for installing data files "
            "(default: installation base dir)",
        ),
        ('root=', None, "install everything relative to this alternate root directory"),
        ('force', 'f', "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set the pre-finalization defaults for all options."""
        self.install_dir = None
        self.outfiles = []
        self.root = None
        self.force = 0
        self.data_files = self.distribution.data_files
        self.warn_dir = 1

    def finalize_options(self):
        """Inherit unset options from the umbrella 'install' command."""
        self.set_undefined_options(
            'install',
            ('install_data', 'install_dir'),
            ('root', 'root'),
            ('force', 'force'),
        )

    def run(self):
        """Copy every declared data file (or directory entry) into place."""
        self.mkpath(self.install_dir)
        for entry in self.data_files:
            if isinstance(entry, str):
                self._install_bare_file(entry)
            else:
                self._install_dir_entry(entry)

    def _install_bare_file(self, spec):
        # A plain filename: copied straight into the base install dir.
        path = convert_path(spec)
        if self.warn_dir:
            self.warn(
                "setup script did not provide a directory for "
                "'%s' -- installing right in '%s'" % (path, self.install_dir)
            )
        (out, _) = self.copy_file(path, self.install_dir)
        self.outfiles.append(out)

    def _install_dir_entry(self, entry):
        # A (target_dir, [files]) tuple.
        target = convert_path(entry[0])
        if not os.path.isabs(target):
            target = os.path.join(self.install_dir, target)
        elif self.root:
            target = change_root(self.root, target)
        self.mkpath(target)

        files = entry[1]
        if files == []:
            # No files listed: the user wants an empty directory, so the
            # directory itself counts as an output.
            self.outfiles.append(target)
        else:
            # Copy files, adding them to the list of output files.
            for item in files:
                (out, _) = self.copy_file(convert_path(item), target)
                self.outfiles.append(out)

    def get_inputs(self):
        """Return the declared data files (possibly empty)."""
        return self.data_files or []

    def get_outputs(self):
        """Return everything installed by :meth:`run`."""
        return self.outfiles
 | 
			
		||||
@@ -0,0 +1,92 @@
 | 
			
		||||
"""
 | 
			
		||||
distutils.command.install_egg_info
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install_egg_info' command, for installing
 | 
			
		||||
a package's PKG-INFO metadata.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import re
 | 
			
		||||
 | 
			
		||||
from ..cmd import Command
 | 
			
		||||
from .. import dir_util
 | 
			
		||||
from .._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class install_egg_info(Command):
    """Install an .egg-info file for the package"""

    description = "Install package's PKG-INFO metadata as an .egg-info file"
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        """No defaults to set before finalization."""
        self.install_dir = None

    @property
    def basename(self):
        """
        Allow basename to be overridden by child class.

        Ref pypa/distutils#2.
        """
        name = to_filename(safe_name(self.distribution.get_name()))
        version = to_filename(safe_version(self.distribution.get_version()))
        major, minor = sys.version_info[:2]
        return "%s-%s-py%d.%d.egg-info" % (name, version, major, minor)

    def finalize_options(self):
        """Derive the target path from the install_lib directory."""
        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        self.target = os.path.join(self.install_dir, self.basename)
        self.outputs = [self.target]

    def run(self):
        """Remove any stale target, then write fresh PKG-INFO metadata."""
        target = self.target
        if os.path.isdir(target) and not os.path.islink(target):
            # A leftover directory in the target's place must be removed.
            dir_util.remove_tree(target, dry_run=self.dry_run)
        elif os.path.exists(target):
            self.execute(os.unlink, (self.target,), "Removing " + target)
        elif not os.path.isdir(self.install_dir):
            self.execute(
                os.makedirs, (self.install_dir,), "Creating " + self.install_dir
            )
        log.info("Writing %s", target)
        if not self.dry_run:
            with open(target, 'w', encoding='UTF-8') as stream:
                self.distribution.metadata.write_pkg_file(stream)

    def get_outputs(self):
        """Return the single metadata file this command installs."""
        return self.outputs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# The following routines are taken from setuptools' pkg_resources module and
 | 
			
		||||
# can be replaced by importing them from pkg_resources once it is included
 | 
			
		||||
# in the stdlib.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def safe_name(name):
    """Normalize *name* to a standard distribution name.

    Every run of characters outside ``[A-Za-z0-9.]`` collapses to a
    single '-'.
    """
    invalid_run = re.compile('[^A-Za-z0-9.]+')
    return invalid_run.sub('-', name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def safe_version(version):
    """Normalize *version* to a standard version string.

    Spaces turn into dots first; any remaining run of characters outside
    ``[A-Za-z0-9.]`` then collapses to a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def to_filename(name):
    """Return the filename-escaped form of a project or version name.

    Any '-' characters are currently replaced with '_'.
    """
    return "_".join(name.split('-'))
 | 
			
		||||
@@ -0,0 +1,45 @@
 | 
			
		||||
"""distutils.command.install_headers
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install_headers' command, to install C/C++ header
 | 
			
		||||
files to the Python include directory."""
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# XXX force is never used
class install_headers(Command):
    """Install C/C++ header files into the configured include directory."""

    description = "install C/C++ header files"

    user_options = [
        ('install-dir=', 'd', "directory to install header files to"),
        ('force', 'f', "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set the pre-finalization defaults."""
        self.install_dir = None
        self.force = 0
        self.outfiles = []

    def finalize_options(self):
        """Inherit unset options from the umbrella 'install' command."""
        self.set_undefined_options(
            'install', ('install_headers', 'install_dir'), ('force', 'force')
        )

    def run(self):
        """Copy every declared header into the install directory."""
        headers = self.distribution.headers
        if not headers:
            return

        self.mkpath(self.install_dir)
        for hdr in headers:
            (copied, _) = self.copy_file(hdr, self.install_dir)
            self.outfiles.append(copied)

    def get_inputs(self):
        """Return the declared headers (possibly empty)."""
        return self.distribution.headers or []

    def get_outputs(self):
        """Return the headers installed by :meth:`run`."""
        return self.outfiles
 | 
			
		||||
@@ -0,0 +1,238 @@
 | 
			
		||||
"""distutils.command.install_lib
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install_lib' command
 | 
			
		||||
(install all Python modules)."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import importlib.util
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from ..errors import DistutilsOptionError
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Extension for Python source files.
# Used below to pick byte-compilation candidates out of build_py's outputs.
PYTHON_SOURCE_EXTENSION = ".py"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class install_lib(Command):
    """Install all built Python modules (pure and extension) and
    optionally byte-compile the pure-Python sources."""

    description = "install all Python modules (extensions and pure Python)"

    # The byte-compilation options are a tad confusing.  Here are the
    # possible scenarios:
    #   1) no compilation at all (--no-compile --no-optimize)
    #   2) compile .pyc only (--compile --no-optimize; default)
    #   3) compile .pyc and "opt-1" .pyc (--compile --optimize)
    #   4) compile "opt-1" .pyc only (--no-compile --optimize)
    #   5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
    #   6) compile "opt-2" .pyc only (--no-compile --optimize-more)
    #
    # The UI for this is two options, 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyc files and what level of
    # optimization to use.

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'compile', 'skip-build']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        """Set pre-finalization defaults; 'install' supplies the real values."""
        # let the 'install' command dictate our installation directory
        self.install_dir = None
        self.build_dir = None
        self.force = 0
        self.compile = None
        self.optimize = None
        self.skip_build = None

    def finalize_options(self):
        """Inherit options from 'install' and validate --optimize (0, 1, 2)."""
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options(
            'install',
            ('build_lib', 'build_dir'),
            ('install_lib', 'install_dir'),
            ('force', 'force'),
            ('compile', 'compile'),
            ('optimize', 'optimize'),
            ('skip_build', 'skip_build'),
        )

        if self.compile is None:
            self.compile = True
        if self.optimize is None:
            self.optimize = False

        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if self.optimize not in (0, 1, 2):
                    raise AssertionError
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Build (if needed), copy the build tree, and byte-compile sources."""
        # Make sure we have built everything we need first
        self.build()

        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()

        # (Optionally) compile .py to .pyc
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)

    # -- Top-level worker functions ------------------------------------
    # (called from 'run()')

    def build(self):
        """Run the build_py / build_ext sub-commands as required."""
        if not self.skip_build:
            if self.distribution.has_pure_modules():
                self.run_command('build_py')
            if self.distribution.has_ext_modules():
                self.run_command('build_ext')

    def install(self):
        """Copy the whole build directory into install_dir.

        Returns the list of copied files, or None when the build directory
        does not exist (nothing to install).
        """
        if os.path.isdir(self.build_dir):
            outfiles = self.copy_tree(self.build_dir, self.install_dir)
        else:
            self.warn(
                "'%s' does not exist -- no Python modules to install" % self.build_dir
            )
            return
        return outfiles

    def byte_compile(self, files):
        """Byte-compile 'files' per the --compile/--optimize settings."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from ..util import byte_compile

        # Get the "--root" directory supplied to the "install" command,
        # and use it as a prefix to strip off the purported filename
        # encoded in bytecode files.  This is far from complete, but it
        # should at least generate usable bytecode in RPM distributions.
        install_root = self.get_finalized_command('install').root

        if self.compile:
            byte_compile(
                files,
                optimize=0,
                force=self.force,
                prefix=install_root,
                dry_run=self.dry_run,
            )
        if self.optimize > 0:
            byte_compile(
                files,
                optimize=self.optimize,
                force=self.force,
                prefix=install_root,
                verbose=self.verbose,
                dry_run=self.dry_run,
            )

    # -- Utility methods -----------------------------------------------

    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
        """Map a build command's outputs from its build dir into output_dir."""
        if not has_any:
            return []

        build_cmd = self.get_finalized_command(build_cmd)
        build_files = build_cmd.get_outputs()
        build_dir = getattr(build_cmd, cmd_option)

        # +len(os.sep) also strips the separator after the build dir.
        prefix_len = len(build_dir) + len(os.sep)
        outputs = []
        for file in build_files:
            outputs.append(os.path.join(output_dir, file[prefix_len:]))

        return outputs

    def _bytecode_filenames(self, py_filenames):
        """Return the bytecode cache paths that would be generated for
        the ``.py`` entries of 'py_filenames'."""
        bytecode_files = []
        for py_file in py_filenames:
            # Since build_py handles package data installation, the
            # list of outputs can contain more than just .py files.
            # Make sure we only report bytecode for the .py files.
            ext = os.path.splitext(os.path.normcase(py_file))[1]
            if ext != PYTHON_SOURCE_EXTENSION:
                continue
            if self.compile:
                bytecode_files.append(
                    importlib.util.cache_from_source(py_file, optimization='')
                )
            if self.optimize > 0:
                bytecode_files.append(
                    importlib.util.cache_from_source(
                        py_file, optimization=self.optimize
                    )
                )

        return bytecode_files

    # -- External interface --------------------------------------------
    # (called by outsiders)

    def get_outputs(self):
        """Return the list of files that would be installed if this command
        were actually run.  Not affected by the "dry-run" flag or whether
        modules have actually been built yet.
        """
        pure_outputs = self._mutate_outputs(
            self.distribution.has_pure_modules(),
            'build_py',
            'build_lib',
            self.install_dir,
        )
        if self.compile:
            bytecode_outputs = self._bytecode_filenames(pure_outputs)
        else:
            bytecode_outputs = []

        ext_outputs = self._mutate_outputs(
            self.distribution.has_ext_modules(),
            'build_ext',
            'build_lib',
            self.install_dir,
        )

        return pure_outputs + bytecode_outputs + ext_outputs

    def get_inputs(self):
        """Get the list of files that are input to this command, ie. the
        files that get installed as they are named in the build tree.
        The files in this list correspond one-to-one to the output
        filenames returned by 'get_outputs()'.
        """
        inputs = []

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            inputs.extend(build_py.get_outputs())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            inputs.extend(build_ext.get_outputs())

        return inputs
 | 
			
		||||
@@ -0,0 +1,61 @@
 | 
			
		||||
"""distutils.command.install_scripts
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'install_scripts' command, for installing
 | 
			
		||||
Python scripts."""
 | 
			
		||||
 | 
			
		||||
# contributed by Bastian Kleineidam
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
from stat import ST_MODE
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class install_scripts(Command):
    """Install scripts (Python or otherwise) from the build tree."""

    description = "install scripts (Python or otherwise)"

    user_options = [
        ('install-dir=', 'd', "directory to install scripts to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'skip-build']

    def initialize_options(self):
        """Set the pre-finalization defaults."""
        self.install_dir = None
        self.force = 0
        self.build_dir = None
        self.skip_build = None

    def finalize_options(self):
        """Inherit unset options from the 'build' and 'install' commands."""
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options(
            'install',
            ('install_scripts', 'install_dir'),
            ('force', 'force'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        """Build (unless skipped), copy the scripts, and mark them executable."""
        if not self.skip_build:
            self.run_command('build_scripts')
        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
        if os.name != 'posix':
            return
        # Set the executable bits (owner, group, and world) on
        # all the scripts we just installed.
        for script in self.get_outputs():
            if self.dry_run:
                log.info("changing mode of %s", script)
                continue
            mode = ((os.stat(script)[ST_MODE]) | 0o555) & 0o7777
            log.info("changing mode of %s to %o", script, mode)
            os.chmod(script, mode)

    def get_inputs(self):
        """Return the scripts declared by the distribution."""
        return self.distribution.scripts or []

    def get_outputs(self):
        """Return the scripts installed by :meth:`run`."""
        return self.outfiles or []
 | 
			
		||||
@@ -0,0 +1,31 @@
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _pythonlib_compat():
 | 
			
		||||
    """
 | 
			
		||||
    On Python 3.7 and earlier, distutils would include the Python
 | 
			
		||||
    library. See pypa/distutils#9.
 | 
			
		||||
    """
 | 
			
		||||
    from distutils import sysconfig
 | 
			
		||||
 | 
			
		||||
    if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    yield 'python{}.{}{}'.format(
 | 
			
		||||
        sys.hexversion >> 24,
 | 
			
		||||
        (sys.hexversion >> 16) & 0xFF,
 | 
			
		||||
        sysconfig.get_config_var('ABIFLAGS'),
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def compose(f1, f2):
    """Return a callable applying *f2* first, then *f1* to its result."""
    def composed(*args, **kwargs):
        return f1(f2(*args, **kwargs))
    return composed
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# On interpreters older than 3.8 (except macOS and AIX), wrap the compat
# shim so the shared Python library is included; otherwise 'pythonlib'
# is just the plain 'list' constructor.
pythonlib = (
    compose(list, _pythonlib_compat)
    if sys.version_info < (3, 8)
    and sys.platform != 'darwin'
    and sys.platform[:3] != 'aix'
    else list
)
 | 
			
		||||
@@ -0,0 +1,321 @@
 | 
			
		||||
"""distutils.command.register
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'register' command (register with the repository).
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# created 2002/10/21, Richard Jones
 | 
			
		||||
 | 
			
		||||
import getpass
 | 
			
		||||
import io
 | 
			
		||||
import logging
 | 
			
		||||
import urllib.parse
 | 
			
		||||
import urllib.request
 | 
			
		||||
from warnings import warn
 | 
			
		||||
 | 
			
		||||
from ..core import PyPIRCCommand
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class register(PyPIRCCommand):
    """Implement the distutils 'register' command.

    Collects the distribution's metadata and POSTs it to a package
    index (the repository configured via ``.pypirc`` or the default),
    after first running the 'check' sub-command on the metadata.
    """

    description = "register the distribution with the Python package index"
    # (long option, short option, help text) triples added on top of the
    # options inherited from PyPIRCCommand.
    user_options = PyPIRCCommand.user_options + [
        ('list-classifiers', None, 'list the valid Trove classifiers'),
        (
            'strict',
            None,
            'Will stop the registering if the meta-data are not fully compliant',
        ),
    ]
    # Flag options that take no argument.
    boolean_options = PyPIRCCommand.boolean_options + [
        'verify',
        'list-classifiers',
        'strict',
    ]

    # The 'check' sub-command unconditionally runs before this command.
    sub_commands = [('check', lambda self: True)]

    def initialize_options(self):
        """Set register-specific options to their defaults."""
        PyPIRCCommand.initialize_options(self)
        self.list_classifiers = 0
        self.strict = 0

    def finalize_options(self):
        """Finalize inherited options and configure the 'check' sub-command."""
        PyPIRCCommand.finalize_options(self)
        # setting options for the `check` subcommand
        check_options = {
            'strict': ('register', self.strict),
            'restructuredtext': ('register', 1),
        }
        self.distribution.command_options['check'] = check_options

    def run(self):
        """Run sub-commands, then verify, list classifiers, or register."""
        self.finalize_options()
        self._set_config()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        # --dry-run only verifies the metadata; --list-classifiers fetches
        # the Trove list; otherwise actually register the distribution.
        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def check_metadata(self):
        """Deprecated API; delegates to the 'check' command."""
        warn(
            "distutils.command.register.check_metadata is deprecated; "
            "use the check command instead",
            DeprecationWarning,
        )
        # Run a finalized 'check' command mirroring this command's options.
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.strict = self.strict
        check.restructuredtext = 1
        check.run()

    def _set_config(self):
        '''Reads the configuration file and set attributes.'''
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            # No usable .pypirc: only the default repository (or its
            # 'pypi' alias) is acceptable.
            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = self.DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        '''Fetch the list of classifiers from the server.'''
        url = self.repository + '?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        log.info(self._read_pypi_response(response))

    def verify_metadata(self):
        '''Send the metadata to the package index server to be checked.'''
        # send the info to the server and report the result
        (code, result) = self.post_to_server(self.build_post_data('verify'))
        log.info('Server response (%s): %s', code, result)

    def send_metadata(self):  # noqa: C901
        '''Send the metadata to the package index server.

        Well, do the following:
        1. figure who the user is, and then
        2. send the data as a Basic auth'ed POST.

        First we try to read the username/password from $HOME/.pypirc,
        which is a ConfigParser-formatted file with a section
        [distutils] containing username and password entries (both
        in clear text). Eg:

            [distutils]
            index-servers =
                pypi

            [pypi]
            username: fred
            password: sekrit

        Otherwise, to figure who the user is, we offer the user three
        choices:

         1. use existing login,
         2. register as a new user, or
         3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce(
                '''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''',
                logging.INFO,
            )
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'), auth)
            self.announce('Server response ({}): {}'.format(code, result), logging.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    self.announce(
                        (
                            'I can store your PyPI login so future '
                            'submissions will be faster.'
                        ),
                        logging.INFO,
                    )
                    self.announce(
                        '(the login will be stored in %s)' % self._get_rc_file(),
                        logging.INFO,
                    )
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            # Interactive new-user registration: prompt until username,
            # matching password/confirm pair, and email are all supplied.
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info('Follow the instructions in it to ' 'complete registration.')
        elif choice == '3':
            # Password reset: only an email address is needed.
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        """Return the distribution metadata as a form-field dict for *action*."""
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version': '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        # Any PEP 314 field present bumps the declared metadata version.
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

    def post_to_server(self, data, auth=None):  # noqa: C901
        '''Post *data* to the server; return a (status_code, message) pair.'''
        if 'name' in data:
            self.announce(
                'Registering {} to {}'.format(data['name'], self.repository),
                logging.INFO,
            )
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type(())):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
            % boundary,
            'Content-length': str(len(body)),
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            # Network-level failure: report it as a synthetic 500.
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, logging.INFO)
        return result
 | 
			
		||||
@@ -0,0 +1,531 @@
 | 
			
		||||
"""distutils.command.sdist
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'sdist' command (create a source distribution)."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
from glob import glob
 | 
			
		||||
from warnings import warn
 | 
			
		||||
 | 
			
		||||
from ..core import Command
 | 
			
		||||
from distutils import dir_util
 | 
			
		||||
from distutils import file_util
 | 
			
		||||
from distutils import archive_util
 | 
			
		||||
from ..text_file import TextFile
 | 
			
		||||
from ..filelist import FileList
 | 
			
		||||
from distutils._log import log
 | 
			
		||||
from ..util import convert_path
 | 
			
		||||
from ..errors import DistutilsOptionError, DistutilsTemplateError
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from ..fancy_getopt import FancyGetopt
    from ..archive_util import ARCHIVE_FORMATS

    # One pseudo-option row per archive format, sorted by format name.
    entries = sorted(
        ("formats=" + name, None, ARCHIVE_FORMATS[name][2])
        for name in ARCHIVE_FORMATS
    )
    FancyGetopt(entries).print_help("List of available source distribution formats:")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class sdist(Command):
 | 
			
		||||
 | 
			
		||||
    # One-line help shown by `setup.py --help-commands`.
    description = "create a source distribution (tarball, zip file, etc.)"

    def checking_metadata(self):
        """Callable used for the check sub-command.

        Placed here so user_options can view it (it is referenced
        from the sub_commands table defined below)."""
        return self.metadata_check
 | 
			
		||||
 | 
			
		||||
    # Command-line options: (long name, short name, help text) triples.
    user_options = [
        ('template=', 't', "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm', "name of manifest file [default: MANIFEST]"),
        (
            'use-defaults',
            None,
            "include the default file set in the manifest "
            "[default; disable with --no-defaults]",
        ),
        ('no-defaults', None, "don't include the default file set"),
        (
            'prune',
            None,
            "specifically exclude files/directories that should not be "
            "distributed (build tree, RCS/CVS dirs, etc.) "
            "[default; disable with --no-prune]",
        ),
        ('no-prune', None, "don't automatically exclude anything"),
        (
            'manifest-only',
            'o',
            "just regenerate the manifest and then stop " "(implies --force-manifest)",
        ),
        (
            'force-manifest',
            'f',
            "forcibly regenerate the manifest and carry on as usual. "
            "Deprecated: now the manifest is always regenerated.",
        ),
        ('formats=', None, "formats for source distribution (comma-separated list)"),
        (
            'keep-temp',
            'k',
            "keep the distribution tree around after creating " + "archive file(s)",
        ),
        (
            'dist-dir=',
            'd',
            "directory to put the source distribution archive(s) in " "[default: dist]",
        ),
        (
            'metadata-check',
            None,
            "Ensure that all required elements of meta-data "
            "are supplied. Warn if any missing. [default]",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    # Flag options (take no argument).
    boolean_options = [
        'use-defaults',
        'prune',
        'manifest-only',
        'force-manifest',
        'keep-temp',
        'metadata-check',
    ]

    # Extra --help-* switches; show_formats is defined at module level above.
    help_options = [
        ('help-formats', None, "list available distribution formats", show_formats),
    ]

    # --no-X switches that turn off the corresponding default-on option.
    negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'}

    # 'check' runs first, gated on the metadata-check option.
    sub_commands = [('check', checking_metadata)]

    # Recognized README spellings for the default file set.
    READMES = ('README', 'README.txt', 'README.rst')
 | 
			
		||||
 | 
			
		||||
    def initialize_options(self):
        """Set every option to its default (finalize_options fills in None)."""
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None

        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1
        self.prune = 1

        self.manifest_only = 0
        self.force_manifest = 0

        # Archive format(s) to produce; gzipped tar by default.
        self.formats = ['gztar']
        self.keep_temp = 0
        self.dist_dir = None

        # Filled in by make_distribution; list of archives produced.
        self.archive_files = None
        self.metadata_check = 1
        # Ownership recorded in generated tar archives (None = current).
        self.owner = None
        self.group = None
 | 
			
		||||
 | 
			
		||||
    def finalize_options(self):
        """Apply defaults and validate the requested archive formats."""
        self.manifest = "MANIFEST" if self.manifest is None else self.manifest
        self.template = "MANIFEST.in" if self.template is None else self.template

        # 'formats' may arrive as a single comma-separated string.
        self.ensure_string_list('formats')

        unknown = archive_util.check_archive_formats(self.formats)
        if unknown:
            raise DistutilsOptionError("unknown archive format '%s'" % unknown)

        if self.dist_dir is None:
            self.dist_dir = "dist"
 | 
			
		||||
 | 
			
		||||
    def run(self):
        """Build the file list, then (unless --manifest-only) the archives."""
        # 'filelist' contains the list of files that will make up the
        # manifest
        self.filelist = FileList()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is accumulated in 'self.filelist'.
        self.get_file_list()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution()
 | 
			
		||||
 | 
			
		||||
    def check_metadata(self):
        """Deprecated API; delegates to the 'check' command."""
        warn(
            "distutils.command.sdist.check_metadata is deprecated, \
              use the check command instead",
            PendingDeprecationWarning,
        )
        # Run a finalized 'check' command in place of this method.
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.run()
 | 
			
		||||
 | 
			
		||||
    def get_file_list(self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.filelist'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options.
        """
        # new behavior when using a template:
        # the file list is recalculated every time because
        # even if MANIFEST.in or setup.py are not changed
        # the user might have added some files in the tree that
        # need to be included.
        #
        #  This makes --force the default and only behavior with templates.
        template_exists = os.path.isfile(self.template)
        # No template: reuse an existing manifest verbatim, provided it
        # was not auto-generated by a previous run (per the helper name).
        if not template_exists and self._manifest_is_not_generated():
            self.read_manifest()
            self.filelist.sort()
            self.filelist.remove_duplicates()
            return

        if not template_exists:
            self.warn(
                ("manifest template '%s' does not exist " + "(using default file list)")
                % self.template
            )
        # Walk the tree so include/exclude patterns have files to match.
        self.filelist.findall()

        if self.use_defaults:
            self.add_defaults()

        if template_exists:
            self.read_template()

        if self.prune:
            self.prune_file_list()

        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()
 | 
			
		||||
 | 
			
		||||
    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        # Each helper below appends one category of files to self.filelist.
        self._add_defaults_standards()
        self._add_defaults_optional()
        self._add_defaults_python()
        self._add_defaults_data_files()
        self._add_defaults_ext()
        self._add_defaults_c_libs()
        self._add_defaults_scripts()
 | 
			
		||||
 | 
			
		||||
    @staticmethod
 | 
			
		||||
    def _cs_path_exists(fspath):
 | 
			
		||||
        """
 | 
			
		||||
        Case-sensitive path existence check
 | 
			
		||||
 | 
			
		||||
        >>> sdist._cs_path_exists(__file__)
 | 
			
		||||
        True
 | 
			
		||||
        >>> sdist._cs_path_exists(__file__.upper())
 | 
			
		||||
        False
 | 
			
		||||
        """
 | 
			
		||||
        if not os.path.exists(fspath):
 | 
			
		||||
            return False
 | 
			
		||||
        # make absolute so we always have a directory
 | 
			
		||||
        abspath = os.path.abspath(fspath)
 | 
			
		||||
        directory, filename = os.path.split(abspath)
 | 
			
		||||
        return filename in os.listdir(directory)
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_standards(self):
        """Add the README (any recognized spelling) and the setup script.

        Warns when a standard file is missing.
        """
        for entry in [self.READMES, self.distribution.script_name]:
            if isinstance(entry, tuple):
                # A tuple lists alternative names; the first one that
                # exists (case-sensitively) satisfies the requirement.
                found = next(
                    (alt for alt in entry if self._cs_path_exists(alt)), None
                )
                if found is not None:
                    self.filelist.append(found)
                else:
                    self.warn(
                        "standard file not found: should have one of " + ', '.join(entry)
                    )
            elif self._cs_path_exists(entry):
                self.filelist.append(entry)
            else:
                self.warn("standard file '%s' not found" % entry)
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_optional(self):
 | 
			
		||||
        optional = ['test/test*.py', 'setup.cfg']
 | 
			
		||||
        for pattern in optional:
 | 
			
		||||
            files = filter(os.path.isfile, glob(pattern))
 | 
			
		||||
            self.filelist.extend(files)
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_python(self):
        """Add pure Python module sources and package_data files."""
        # build_py is used to get:
        #  - python modules
        #  - files defined in package_data
        build_py = self.get_finalized_command('build_py')

        # getting python files
        if self.distribution.has_pure_modules():
            self.filelist.extend(build_py.get_source_files())

        # getting package_data files
        # (computed in build_py.data_files by build_py.finalize_options)
        for pkg, src_dir, build_dir, filenames in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_data_files(self):
        """Add the files listed in 'distribution.data_files' to the file list.

        Entries are either bare filenames or (target_dir, filenames) pairs;
        only existing regular files are added.
        """
        if not self.distribution.has_data_files():
            return

        for entry in self.distribution.data_files:
            if isinstance(entry, str):
                # A plain filename.
                path = convert_path(entry)
                if os.path.isfile(path):
                    self.filelist.append(path)
            else:
                # A (dirname, filenames) tuple; the target dirname is not
                # relevant for collecting the sources.
                _, names = entry
                for name in names:
                    path = convert_path(name)
                    if os.path.isfile(path):
                        self.filelist.append(path)
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_ext(self):
 | 
			
		||||
        if self.distribution.has_ext_modules():
 | 
			
		||||
            build_ext = self.get_finalized_command('build_ext')
 | 
			
		||||
            self.filelist.extend(build_ext.get_source_files())
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_c_libs(self):
 | 
			
		||||
        if self.distribution.has_c_libraries():
 | 
			
		||||
            build_clib = self.get_finalized_command('build_clib')
 | 
			
		||||
            self.filelist.extend(build_clib.get_source_files())
 | 
			
		||||
 | 
			
		||||
    def _add_defaults_scripts(self):
 | 
			
		||||
        if self.distribution.has_scripts():
 | 
			
		||||
            build_scripts = self.get_finalized_command('build_scripts')
 | 
			
		||||
            self.filelist.extend(build_scripts.get_source_files())
 | 
			
		||||
 | 
			
		||||
    def read_template(self):
        """Read and parse manifest template file named by self.template.

        (usually "MANIFEST.in") The parsing and processing is done by
        'self.filelist', which updates itself accordingly.
        """
        log.info("reading manifest template '%s'", self.template)
        # TextFile normalizes the template: comments stripped, blank lines
        # skipped, backslash-continued lines joined, and surrounding
        # whitespace trimmed, so each readline() yields one clean directive.
        template = TextFile(
            self.template,
            strip_comments=1,
            skip_blanks=1,
            join_lines=1,
            lstrip_ws=1,
            rstrip_ws=1,
            collapse_join=1,
        )

        try:
            while True:
                line = template.readline()
                if line is None:  # end of file
                    break

                try:
                    self.filelist.process_template_line(line)
                # the call above can raise a DistutilsTemplateError for
                # malformed lines, or a ValueError from the lower-level
                # convert_path function
                except (DistutilsTemplateError, ValueError) as msg:
                    # Warn about the bad line but keep processing the rest
                    # of the template.
                    self.warn(
                        "%s, line %d: %s"
                        % (template.filename, template.current_line, msg)
                    )
        finally:
            # Always release the template file, even on unexpected errors.
            template.close()
 | 
			
		||||
 | 
			
		||||
    def prune_file_list(self):
        """Prune off branches that might slip into the file list as created
        by 'read_template()', but really don't belong there:
          * the build tree (typically "build")
          * the release tree itself (only an issue if we ran "sdist"
            previously with --keep-temp, or it aborted)
          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
        """
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()

        # prefix=... excludes everything under the given directory.
        self.filelist.exclude_pattern(None, prefix=build.build_base)
        self.filelist.exclude_pattern(None, prefix=base_dir)

        # On Windows the list may contain either path separator, so the
        # regex must accept both.
        if sys.platform == 'win32':
            seps = r'/|\\'
        else:
            seps = '/'

        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
        # Match a VCS directory name as a whole path component: at the start
        # of the path or after a separator, and followed by a separator.
        vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
 | 
			
		||||
 | 
			
		||||
    def write_manifest(self):
        """Write the file list in 'self.filelist' (presumably as filled in
        by 'add_defaults()' and 'read_template()') to the manifest file
        named by 'self.manifest'.
        """
        # Never clobber a manifest the user maintains by hand.
        if self._manifest_is_not_generated():
            log.info(
                "not writing to manually maintained "
                "manifest file '%s'" % self.manifest
            )
            return

        content = self.filelist.files[:]
        # The marker line lets '_manifest_is_not_generated()' recognize this
        # file as machine-written on the next run.
        content.insert(0, '# file GENERATED by distutils, do NOT edit')
        # self.execute() honors --dry-run and logs the given description.
        self.execute(
            file_util.write_file,
            (self.manifest, content),
            "writing manifest file '%s'" % self.manifest,
        )
 | 
			
		||||
 | 
			
		||||
    def _manifest_is_not_generated(self):
 | 
			
		||||
        # check for special comment used in 3.1.3 and higher
 | 
			
		||||
        if not os.path.isfile(self.manifest):
 | 
			
		||||
            return False
 | 
			
		||||
 | 
			
		||||
        fp = open(self.manifest)
 | 
			
		||||
        try:
 | 
			
		||||
            first_line = fp.readline()
 | 
			
		||||
        finally:
 | 
			
		||||
            fp.close()
 | 
			
		||||
        return first_line != '# file GENERATED by distutils, do NOT edit\n'
 | 
			
		||||
 | 
			
		||||
    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        with open(self.manifest) as manifest:
            for raw_line in manifest:
                entry = raw_line.strip()
                # Blank lines and comments carry no filenames.
                if not entry or entry.startswith('#'):
                    continue
                self.filelist.append(entry)
 | 
			
		||||
 | 
			
		||||
    def make_release_tree(self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """
        # Create all the directories under 'base_dir' necessary to
        # put 'files' there; the 'mkpath()' is just so we don't die
        # if the manifest happens to be empty.
        self.mkpath(base_dir)
        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        if hasattr(os, 'link'):  # can make hard links on this system
            link = 'hard'
            msg = "making hard links in %s..." % base_dir
        else:  # nope, have to copy
            link = None
            msg = "copying files to %s..." % base_dir

        if not files:
            log.warning("no files to distribute -- empty manifest?")
        else:
            log.info(msg)
        for file in files:
            if not os.path.isfile(file):
                log.warning("'%s' not a regular file -- skipping", file)
            else:
                # Preserve the file's relative path under base_dir.
                dest = os.path.join(base_dir, file)
                self.copy_file(file, dest, link=link)

        # Drop PKG-INFO metadata at the top of the release tree.
        self.distribution.metadata.write_pkg_info(base_dir)
 | 
			
		||||
 | 
			
		||||
    def make_distribution(self):
        """Create the source distribution(s).  First, we create the release
        tree with 'make_release_tree()'; then, we create all required
        archive files (according to 'self.formats') from the release tree.
        Finally, we clean up by blowing away the release tree (unless
        'self.keep_temp' is true).  The list of archive files created is
        stored so it can be retrieved later by 'get_archive_files()'.
        """
        # Don't warn about missing meta-data here -- should be (and is!)
        # done elsewhere.
        base_dir = self.distribution.get_fullname()
        base_name = os.path.join(self.dist_dir, base_dir)

        self.make_release_tree(base_dir, self.filelist.files)
        archive_files = []  # remember names of files we create
        # tar archive must be created last to avoid overwrite and remove
        if 'tar' in self.formats:
            # Move 'tar' to the end of the formats list, keeping the rest
            # in their original order.
            self.formats.append(self.formats.pop(self.formats.index('tar')))

        for fmt in self.formats:
            file = self.make_archive(
                base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group
            )
            archive_files.append(file)
            # Record the archive in dist_files so later commands (e.g.
            # 'upload') can find it.
            self.distribution.dist_files.append(('sdist', '', file))

        self.archive_files = archive_files

        if not self.keep_temp:
            dir_util.remove_tree(base_dir, dry_run=self.dry_run)
 | 
			
		||||
 | 
			
		||||
    def get_archive_files(self):
        """Return the archive files created when the command was run, or
        None if the command has not been run yet.
        """
        return self.archive_files
 | 
			
		||||
@@ -0,0 +1,207 @@
 | 
			
		||||
"""
 | 
			
		||||
distutils.command.upload
 | 
			
		||||
 | 
			
		||||
Implements the Distutils 'upload' subcommand (upload package to a package
 | 
			
		||||
index).
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import io
 | 
			
		||||
import hashlib
 | 
			
		||||
import logging
 | 
			
		||||
from base64 import standard_b64encode
 | 
			
		||||
from urllib.request import urlopen, Request, HTTPError
 | 
			
		||||
from urllib.parse import urlparse
 | 
			
		||||
from ..errors import DistutilsError, DistutilsOptionError
 | 
			
		||||
from ..core import PyPIRCCommand
 | 
			
		||||
from ..spawn import spawn
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
# https://bugs.python.org/issue40698
# Maps upload form-field name -> hashlib constructor; a value is None when
# that algorithm is missing from this interpreter's hashlib build.
_FILE_CONTENT_DIGESTS = {
    "md5_digest": getattr(hashlib, "md5", None),
    "sha256_digest": getattr(hashlib, "sha256", None),
    "blake2_256_digest": getattr(hashlib, "blake2b", None),
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class upload(PyPIRCCommand):
    """Distutils 'upload' command: POST built distribution files to a
    package index, reading credentials from .pypirc (via PyPIRCCommand).
    """

    description = "upload binary package to PyPI"

    user_options = PyPIRCCommand.user_options + [
        ('sign', 's', 'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
    ]

    boolean_options = PyPIRCCommand.boolean_options + ['sign']

    def initialize_options(self):
        """Set default values for all options this command supports."""
        PyPIRCCommand.initialize_options(self)
        self.username = ''
        self.password = ''
        self.show_response = 0
        self.sign = False
        self.identity = None

    def finalize_options(self):
        """Validate option combinations and pull credentials from .pypirc."""
        PyPIRCCommand.finalize_options(self)
        # --identity selects a GPG key, which only matters when signing.
        if self.identity and not self.sign:
            raise DistutilsOptionError("Must use --sign for --identity to have meaning")
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

        # getting the password from the distribution
        # if previously set by the register command
        if not self.password and self.distribution.password:
            self.password = self.distribution.password

    def run(self):
        """Upload every file recorded in 'distribution.dist_files'.

        Raises DistutilsOptionError if no build command ran in the same
        invocation (dist_files is empty).
        """
        if not self.distribution.dist_files:
            msg = (
                "Must create and upload files in one command "
                "(e.g. setup.py sdist upload)"
            )
            raise DistutilsOptionError(msg)
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)

    def upload_file(self, command, pyversion, filename):  # noqa: C901
        """Upload a single file to the repository.

        'command' is the build command that produced the file (used as the
        'filetype' form field), 'pyversion' the Python version tag, and
        'filename' the path of the file to send.  Raises DistutilsError on
        a non-200 server response.
        """
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                # Splice --local-user before the filename arguments.
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename, 'rb')
        try:
            content = f.read()
        finally:
            f.close()

        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',
            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),
            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }

        data['comment'] = ''

        # file content digests
        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
            if digest_cons is None:
                continue
            try:
                data[digest_name] = digest_cons(content).hexdigest()
            except ValueError:
                # hash digest not available or blocked by security policy
                pass

        if self.sign:
            # Attach the detached signature written by gpg above.
            with open(filename + ".asc", "rb") as f:
                data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            # NOTE: 'value' deliberately shadows the list while iterating it.
            for value in value:
                if type(value) is tuple:
                    # (filename, bytes) pair: emit as a file part.
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting {} to {}".format(filename, self.repository)
        self.announce(msg, logging.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            # HTTP error responses still carry a status worth reporting.
            status = e.code
            reason = e.msg
        except OSError as e:
            # Network-level failure: log and re-raise unchanged.
            self.announce(str(e), logging.ERROR)
            raise

        if status == 200:
            self.announce(
                'Server response ({}): {}'.format(status, reason), logging.INFO
            )
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, logging.INFO)
        else:
            msg = 'Upload failed ({}): {}'.format(status, reason)
            self.announce(msg, logging.ERROR)
            raise DistutilsError(msg)
 | 
			
		||||
@@ -0,0 +1,139 @@
 | 
			
		||||
"""distutils.pypirc
 | 
			
		||||
 | 
			
		||||
Provides the PyPIRCCommand class, the base class for the command classes
 | 
			
		||||
that uses .pypirc in the distutils.command package.
 | 
			
		||||
"""
 | 
			
		||||
import os
 | 
			
		||||
from configparser import RawConfigParser
 | 
			
		||||
 | 
			
		||||
from .cmd import Command
 | 
			
		||||
 | 
			
		||||
DEFAULT_PYPIRC = """\
 | 
			
		||||
[distutils]
 | 
			
		||||
index-servers =
 | 
			
		||||
    pypi
 | 
			
		||||
 | 
			
		||||
[pypi]
 | 
			
		||||
username:%s
 | 
			
		||||
password:%s
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file"""

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'
    # Class-level fallbacks; per-instance values are set in
    # initialize_options()/finalize_options().
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None, 'display full response text from server'),
    ]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Returns rc file path."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Creates a default .pypirc file."""
        rc = self._get_rc_file()
        # 0o600: the file holds a clear-text password, keep it owner-only.
        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
            f.write(DEFAULT_PYPIRC % (username, password))

    def _read_pypirc(self):  # noqa: C901
        """Reads the .pypirc file.

        Returns a dict with 'username', 'password', 'repository', 'realm'
        and 'server' keys for the server entry matching 'self.repository',
        or an empty dict when no usable configuration is found.
        """
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [
                    server.strip()
                    for server in index_servers.split('\n')
                    if server.strip() != ''
                ]
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (
                        ('repository', self.DEFAULT_REPOSITORY),
                        ('realm', self.DEFAULT_REALM),
                        ('password', None),
                    ):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if server == 'pypi' and repository in (
                        self.DEFAULT_REPOSITORY,
                        'pypi',
                    ):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (
                        current['server'] == repository
                        or current['repository'] == repository
                    ):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM,
                }

        return {}

    def _read_pypi_response(self, response):
        """Read and decode a PyPI HTTP response."""
        # Local import, mirroring the original local 'import cgi'.
        import email.message

        content_type = response.getheader('content-type', 'text/plain')
        # 'cgi.parse_header' was removed along with the 'cgi' module in
        # Python 3.13 (PEP 594); email.message parses the same MIME header
        # syntax and yields the charset parameter, defaulting to 'ascii'.
        msg = email.message.EmailMessage()
        msg['content-type'] = content_type
        encoding = msg.get_content_charset('ascii')
        return response.read().decode(encoding)

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
 | 
			
		||||
@@ -0,0 +1,291 @@
 | 
			
		||||
"""distutils.core
 | 
			
		||||
 | 
			
		||||
The only module that needs to be imported to use the Distutils; provides
 | 
			
		||||
the 'setup' function (which is to be called from the setup script).  Also
 | 
			
		||||
indirectly provides the Distribution and Command classes, although they are
 | 
			
		||||
really defined in distutils.dist and distutils.cmd.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import tokenize
 | 
			
		||||
 | 
			
		||||
from .debug import DEBUG
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsSetupError,
 | 
			
		||||
    DistutilsError,
 | 
			
		||||
    CCompilerError,
 | 
			
		||||
    DistutilsArgError,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
# Mainly import these so setup scripts can "from distutils.core import" them.
 | 
			
		||||
from .dist import Distribution
 | 
			
		||||
from .cmd import Command
 | 
			
		||||
from .config import PyPIRCCommand
 | 
			
		||||
from .extension import Extension
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup']
 | 
			
		||||
 | 
			
		||||
# A barebones help message displayed when the user runs the setup
# script with no arguments at all.  More useful help is generated with
# various --help options: global help, list commands, and per-command help.
USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
   or: %(script)s --help [cmd1 cmd2 ...]
   or: %(script)s --help-commands
   or: %(script)s cmd --help
"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def gen_usage(script_name):
    """Return the barebones USAGE message with the basename of
    'script_name' substituted for every '%(script)s' placeholder.
    """
    return USAGE % {'script': os.path.basename(script_name)}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
# 'run_setup()' sets _setup_stop_after before exec'ing the setup script so
# that 'setup()' returns early at the requested checkpoint;
# _setup_distribution records the Distribution instance created by the last
# 'setup()' call so 'run_setup()' can hand it back to its caller.
_setup_stop_after = None
_setup_distribution = None
 | 
			
		||||
 | 
			
		||||
# Legal keyword arguments for the setup() function.
# NOTE: this tuple is informational; 'setup()' as defined in this module
# does not validate its **attrs against this list.
setup_keywords = (
    'distclass',
    'script_name',
    'script_args',
    'options',
    'name',
    'version',
    'author',
    'author_email',
    'maintainer',
    'maintainer_email',
    'url',
    'license',
    'description',
    'long_description',
    'keywords',
    'platforms',
    'classifiers',
    'download_url',
    'requires',
    'provides',
    'obsoletes',
)
 | 
			
		||||
 | 
			
		||||
# Legal keyword arguments for the Extension constructor.
# NOTE: informational only; nothing in this module enforces the list.
extension_keywords = (
    'name',
    'sources',
    'include_dirs',
    'define_macros',
    'undef_macros',
    'library_dirs',
    'libraries',
    'runtime_library_dirs',
    'extra_objects',
    'extra_compile_args',
    'extra_link_args',
    'swig_opts',
    'export_symbols',
    'depends',
    'language',
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def setup(**attrs):  # noqa: C901
    """The gateway to the Distutils: do everything your setup script needs
    to do, in a highly flexible and user-driven way.  Briefly: create a
    Distribution instance; find and parse config files; parse the command
    line; run each Distutils command found there, customized by the options
    supplied to 'setup()' (as keyword arguments), in config files, and on
    the command line.

    The Distribution instance might be an instance of a class supplied via
    the 'distclass' keyword argument to 'setup'; if no such class is
    supplied, then the Distribution class (in dist.py) is instantiated.
    All other arguments to 'setup' (except for 'cmdclass') are used to set
    attributes of the Distribution instance.

    The 'cmdclass' argument, if supplied, is a dictionary mapping command
    names to command classes.  Each command encountered on the command line
    will be turned into a command class, which is in turn instantiated; any
    class found in 'cmdclass' is used in place of the default, which is
    (for command 'foo_bar') class 'foo_bar' in module
    'distutils.command.foo_bar'.  The command class must provide a
    'user_options' attribute which is a list of option specifiers for
    'distutils.fancy_getopt'.  Any command-line options between the current
    and the next command are used to set attributes of the current command
    object.

    When the entire command-line has been successfully parsed, calls the
    'run()' method on each command object in turn.  This method will be
    driven entirely by the Distribution object (which each command object
    has a reference to, thanks to its constructor), and the
    command-specific options that became attributes of each command
    object.
    """

    global _setup_stop_after, _setup_distribution

    # Determine the distribution class -- either caller-supplied or
    # our Distribution (see below).  'distclass' is consumed here so that
    # it is not passed through to the Distribution constructor.
    klass = attrs.get('distclass')
    if klass:
        del attrs['distclass']
    else:
        klass = Distribution

    # Default the script identity from sys.argv unless the caller
    # supplied it explicitly (run_setup() does, for example).
    if 'script_name' not in attrs:
        attrs['script_name'] = os.path.basename(sys.argv[0])
    if 'script_args' not in attrs:
        attrs['script_args'] = sys.argv[1:]

    # Create the Distribution instance, using the remaining arguments
    # (ie. everything except distclass) to initialize it
    try:
        # Record the instance in the module-level global so run_setup()
        # can retrieve it after exec'ing the setup script.
        _setup_distribution = dist = klass(attrs)
    except DistutilsSetupError as msg:
        if 'name' not in attrs:
            raise SystemExit("error in setup command: %s" % msg)
        else:
            raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))

    # Checkpoint 1: run_setup(..., stop_after="init") bails out here.
    if _setup_stop_after == "init":
        return dist

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.
    dist.parse_config_files()

    if DEBUG:
        print("options (after parsing config files):")
        dist.dump_option_dicts()

    # Checkpoint 2: stop_after="config".
    if _setup_stop_after == "config":
        return dist

    # Parse the command line and override config files; any
    # command-line errors are the end user's fault, so turn them into
    # SystemExit to suppress tracebacks.
    try:
        ok = dist.parse_command_line()
    except DistutilsArgError as msg:
        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)

    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()

    # Checkpoint 3: stop_after="commandline".
    if _setup_stop_after == "commandline":
        return dist

    # And finally, run all the commands found on the command line.
    # 'ok' is false when parse_command_line() handled a --help-style
    # request, in which case there is nothing left to run.
    if ok:
        return run_commands(dist)

    return dist
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# setup ()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_commands(dist):
    """Run every command attached to the given Distribution object.

    Failures are converted into ``SystemExit`` (unless DISTUTILS_DEBUG is
    set, in which case the original traceback is allowed to propagate).
    Assumes ``sys.argv`` or ``dist.script_args`` has already been set up.
    Returns the same Distribution instance.
    """
    try:
        dist.run_commands()
    except KeyboardInterrupt:
        # Ctrl-C from the user: exit quietly rather than dump a traceback.
        raise SystemExit("interrupted")
    except OSError as exc:
        if not DEBUG:
            raise SystemExit("error: {}".format(exc))
        sys.stderr.write("error: {}\n".format(exc))
        raise
    except (DistutilsError, CCompilerError) as err:
        if not DEBUG:
            raise SystemExit("error: " + str(err))
        raise

    return dist
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_setup(script_name, script_args=None, stop_after="run"):
    """Run a setup script in a somewhat controlled environment, and
    return the Distribution instance that drives things.  This is useful
    if you need to find out the distribution meta-data (passed as
    keyword args from 'script' to 'setup()'), or the contents of the
    config files or command-line.

    'script_name' is a file that will be read and run with 'exec()';
    'sys.argv[0]' will be replaced with 'script' for the duration of the
    call.  'script_args' is a list of strings; if supplied,
    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
    the call.

    'stop_after' tells 'setup()' when to stop processing; possible
    values:
      init
        stop after the Distribution instance has been created and
        populated with the keyword arguments to 'setup()'
      config
        stop after config files have been parsed (and their data
        stored in the Distribution instance)
      commandline
        stop after the command-line ('sys.argv[1:]' or 'script_args')
        have been parsed (and the data stored in the Distribution)
      run [default]
        stop after all commands have been run (the same as if 'setup()'
        had been called in the usual way)

    Returns the Distribution instance, which provides all information
    used to drive the Distutils.
    """
    if stop_after not in ('init', 'config', 'commandline', 'run'):
        raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after))

    # Arm the module-level checkpoint that 'setup()' consults.
    global _setup_stop_after, _setup_distribution
    _setup_stop_after = stop_after

    # Swap sys.argv for the duration of the exec; always restore it and
    # reset the checkpoint, even if the script raises.
    save_argv = sys.argv.copy()
    g = {'__file__': script_name, '__name__': '__main__'}
    try:
        try:
            sys.argv[0] = script_name
            if script_args is not None:
                sys.argv[1:] = script_args
            # tokenize.open supports automatic encoding detection
            with tokenize.open(script_name) as f:
                # NOTE(review): r'\r\n' is a raw string, so this replaces the
                # literal two-character escape sequences backslash-r /
                # backslash-n, not actual CRLF line endings; this matches
                # long-standing upstream distutils behavior.
                code = f.read().replace(r'\r\n', r'\n')
                exec(code, g)
        finally:
            sys.argv = save_argv
            _setup_stop_after = None
    except SystemExit:
        # Hmm, should we do something if exiting with a non-zero code
        # (ie. error)?
        pass

    # 'setup()' stores the Distribution it creates in _setup_distribution;
    # if it is still None, the script never called setup() at all.
    if _setup_distribution is None:
        raise RuntimeError(
            (
                "'distutils.core.setup()' was never called -- "
                "perhaps '%s' is not a Distutils setup script?"
            )
            % script_name
        )

    # I wonder if the setup script's namespace -- g and l -- would be of
    # any interest to callers?
    # print "_setup_distribution:", _setup_distribution
    return _setup_distribution
			
		||||
 | 
			
		||||
 | 
			
		||||
# run_setup ()
 | 
			
		||||
@@ -0,0 +1,358 @@
 | 
			
		||||
"""distutils.cygwinccompiler
 | 
			
		||||
 | 
			
		||||
Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
 | 
			
		||||
handles the Cygwin port of the GNU C compiler to Windows.  It also contains
 | 
			
		||||
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
 | 
			
		||||
cygwin in no-cygwin mode).
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
import copy
 | 
			
		||||
import shlex
 | 
			
		||||
import warnings
 | 
			
		||||
from subprocess import check_output
 | 
			
		||||
 | 
			
		||||
from .unixccompiler import UnixCCompiler
 | 
			
		||||
from .file_util import write_file
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
    CCompilerError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
)
 | 
			
		||||
from .version import LooseVersion, suppress_known_deprecation
 | 
			
		||||
from ._collections import RangeMap
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Map the MSVC internal compiler version (the "MSC v.NNNN" marker that an
# MSVC-built Python embeds in sys.version) to the runtime library names to
# link against.  Presumably RangeMap.left treats each key as the lower
# bound of a version range (see distutils._collections.RangeMap) -- so 2000
# marks versions beyond the known table as undefined; verify against
# _collections before relying on the exact boundary semantics.
_msvcr_lookup = RangeMap.left(
    {
        # MSVC 7.0
        1300: ['msvcr70'],
        # MSVC 7.1
        1310: ['msvcr71'],
        # VS2005 / MSVC 8.0
        1400: ['msvcr80'],
        # VS2008 / MSVC 9.0
        1500: ['msvcr90'],
        # VS2010 / MSVC 10.0
        1600: ['msvcr100'],
        # VS2012 / MSVC 11.0
        1700: ['msvcr110'],
        # VS2013 / MSVC 12.0
        1800: ['msvcr120'],
        # VS2015 / MSVC 14.0
        1900: ['ucrt', 'vcruntime140'],
        2000: RangeMap.undefined_value,
    },
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_msvcr():
    """Return the MSVC runtime libraries to link against, if any.

    Detection is based on the "MSC v.NNNN" marker embedded in
    ``sys.version`` by Pythons built with MSVC 7.0 or later; returns
    None when no such marker is present.
    """
    match = re.search(r'MSC v\.(\d{4})', sys.version)
    if match is None:
        # Not an MSVC-built Python: no runtime library to add.
        return
    msc_ver = int(match.group(1))
    try:
        return _msvcr_lookup[msc_ver]
    except KeyError:
        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Message emitted (as a warning or an error, depending on the compiler
# class) whenever a caller supplies 'runtime_library_dirs', which cannot
# be honoured by these Windows toolchains.
_runtime_library_dirs_msg = (
    "Unable to set runtime library search path on Windows, "
    "usually indicated by `runtime_library_dirs` parameter to Extension"
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CygwinCCompiler(UnixCCompiler):
    """Handles the Cygwin port of the GNU C compiler to Windows."""

    # File-naming conventions for cygwin builds; note the 'cyg' prefix
    # used for dynamically loaded libraries.
    compiler_type = 'cygwin'
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll.a"
    dylib_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "lib%s%s"
    dylib_lib_format = "cyg%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=0, dry_run=0, force=0):
        # Configure the gcc-based toolchain and sanity-check that this
        # Python's pyconfig.h looks usable with GCC (warn but continue
        # if it does not).

        super().__init__(verbose, dry_run, force)

        status, details = check_config_h()
        self.debug_print(
            "Python's GCC status: {} (details: {})".format(status, details)
        )
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros." % details
            )

        # Honour CC/CXX from the environment, defaulting to gcc/g++.
        self.cc = os.environ.get('CC', 'gcc')
        self.cxx = os.environ.get('CXX', 'g++')

        # The C compiler doubles as the DLL linker.
        self.linker_dll = self.cc
        shared_option = "-shared"

        self.set_executables(
            compiler='%s -mcygwin -O -Wall' % self.cc,
            compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
            compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
            linker_exe='%s -mcygwin' % self.cc,
            linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)),
        )

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.
        self.dll_libraries = get_msvcr()

    @property
    def gcc_version(self):
        # Older numpy depended on this existing to check for ancient
        # gcc versions. This doesn't make much sense with clang etc so
        # just hardcode to something recent.
        # https://github.com/numpy/numpy/pull/20333
        warnings.warn(
            "gcc_version attribute of CygwinCCompiler is deprecated. "
            "Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
            DeprecationWarning,
            stacklevel=2,
        )
        with suppress_known_deprecation():
            return LooseVersion("11.2.0")

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compiles the source by spawning GCC and windres if needed."""
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
        else:  # for other files use the C-compiler
            try:
                self.spawn(
                    self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs
                )
            except DistutilsExecError as msg:
                raise CompileError(msg)

    def link(
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link the objects, delegating to UnixCCompiler.link after
        handling export symbols via a generated .def file and adding the
        MSVC runtime libraries collected in __init__."""
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # rpath cannot be encoded on this platform; warn and ignore.
        if runtime_library_dirs:
            self.warn(_runtime_library_dirs_msg)

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if (export_symbols is not None) and (
            target_desc != self.EXECUTABLE or self.linker_dll == "gcc"
        ):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename)
            )

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate .def file
            contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
            for sym in export_symbols:
                contents.append(sym)
            self.execute(write_file, (def_file, contents), "writing %s" % def_file)

            # next add options for def-file

            # for gcc/ld the def-file is specified as any object files
            objects.append(def_file)

        # end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let ld strip the output file
        # (On my machine: 10KiB < stripped_file < ??100KiB
        #   unstripped_file = stripped_file + XXX KiB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(
            self,
            target_desc,
            objects,
            output_filename,
            output_dir,
            libraries,
            library_dirs,
            runtime_library_dirs,
            None,  # export_symbols, we do this in our def-file
            debug,
            extra_preargs,
            extra_postargs,
            build_temp,
            target_lang,
        )

    def runtime_library_dir_option(self, dir):
        # cygwin doesn't support rpath. While in theory we could error
        # out like MSVC does, code might expect it to work like on Unix, so
        # just warn and hope for the best.
        self.warn(_runtime_library_dirs_msg)
        return []

    # -- Miscellaneous methods -----------------------------------------

    def _make_out_path(self, output_dir, strip_dir, src_name):
        # use normcase to make sure '.rc' is really '.rc' and not '.RC'
        norm_src_name = os.path.normcase(src_name)
        return super()._make_out_path(output_dir, strip_dir, norm_src_name)

    @property
    def out_extensions(self):
        """
        Add support for rc and res files: each maps to itself plus the
        object-file extension (e.g. '.rc' -> '.rc.o').
        """
        return {
            **super().out_extensions,
            **{ext: ext + self.obj_extension for ext in ('.res', '.rc')},
        }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """Handles the Mingw32 port of the GNU C compiler to Windows."""

    compiler_type = 'mingw32'

    def __init__(self, verbose=0, dry_run=0, force=0):

        super().__init__(verbose, dry_run, force)

        shared_option = "-shared"

        # A cygwin-targeting gcc produces cygwin binaries and therefore
        # cannot be used to build native (mingw32) extensions.
        if is_cygwincc(self.cc):
            raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')

        # Same executables as CygwinCCompiler but without the -mcygwin flag.
        self.set_executables(
            compiler='%s -O -Wall' % self.cc,
            compiler_so='%s -mdll -O -Wall' % self.cc,
            compiler_cxx='%s -O -Wall' % self.cxx,
            linker_exe='%s' % self.cc,
            linker_so='{} {}'.format(self.linker_dll, shared_option),
        )

    def runtime_library_dir_option(self, dir):
        # rpath is unsupported here; unlike CygwinCCompiler (which merely
        # warns), mingw32 treats the request as a hard error.
        raise DistutilsPlatformError(_runtime_library_dirs_msg)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using an unmodified
# version.

# Status codes returned by check_config_h():
CONFIG_H_OK = "ok"  # pyconfig.h looks GCC/Clang-compatible
CONFIG_H_NOTOK = "not ok"  # pyconfig.h does not mention __GNUC__
CONFIG_H_UNCERTAIN = "uncertain"  # pyconfig.h could not be read
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def check_config_h():
    """Report whether this Python installation looks amenable to building
    extensions with GCC.

    Return a ``(status, details)`` tuple, where ``status`` is one of:

    - CONFIG_H_OK: all is well, go ahead and compile
    - CONFIG_H_NOTOK: doesn't look good
    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h

    and ``details`` is a human-readable string explaining the situation.

    There are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" or "Clang" (implying this Python was built by such a
    compiler), or the installed "pyconfig.h" contains "__GNUC__".
    """
    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...
    from distutils import sysconfig

    # An interpreter compiled by GCC ships a pyconfig.h that should be OK.
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # Clang would also work
    if "Clang" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'Clang'"

    # Otherwise, look for __GNUC__ in the installed config header.
    fn = sysconfig.get_config_h_filename()
    try:
        with open(fn) as config_h:
            has_gnuc = "__GNUC__" in config_h.read()
    except OSError as exc:
        return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror))

    if has_gnuc:
        return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
    return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def is_cygwincc(cc):
    """Try to determine if the compiler that would be used is from cygwin.

    Asks the compiler for its target triple via '-dumpmachine' and checks
    whether it ends with 'cygwin'.
    """
    cmd = shlex.split(cc)
    cmd.append('-dumpmachine')
    machine = check_output(cmd).strip()
    return machine.endswith(b'cygwin')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Deliberately None: code that monkeypatches or probes for the removed
# 'get_versions()' helper still finds the attribute instead of crashing.
get_versions = None
"""
A stand-in for the previous get_versions() function to prevent failures
when monkeypatched. See pypa/setuptools#2969.
"""
 | 
			
		||||
@@ -0,0 +1,5 @@
 | 
			
		||||
import os

# If DISTUTILS_DEBUG is anything other than the empty string, we run in
# debug mode.  The raw environment value (or None when unset) is kept so
# callers can simply test the truthiness of DEBUG.
DEBUG = os.getenv('DISTUTILS_DEBUG')
 | 
			
		||||
@@ -0,0 +1,96 @@
 | 
			
		||||
"""distutils.dep_util
 | 
			
		||||
 | 
			
		||||
Utility functions for simple, timestamp-based dependency of files
 | 
			
		||||
and groups of files; also, function based entirely on such
 | 
			
		||||
timestamp dependency analysis."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from .errors import DistutilsFileError
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def newer(source, target):
    """Return true if 'source' exists and was modified more recently than
    'target', or if 'source' exists and 'target' doesn't.  Return false
    when both exist and 'target' is the same age or younger than 'source'.

    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source))
    # A missing target is always out of date.
    if not os.path.exists(target):
        return 1

    from stat import ST_MTIME

    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# newer ()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def newer_pairwise(sources, targets):
    """Walk two equal-length filename lists in parallel, testing each
    source against its corresponding target with 'newer()'.  Return the
    pair of lists (sources, targets) restricted to those pairs where the
    source is newer than the target.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")

    # Collect only the (source, target) pairs that are out of date.
    n_sources = []
    n_targets = []
    for src, tgt in zip(sources, targets):
        if newer(src, tgt):
            n_sources.append(src)
            n_targets.append(tgt)

    return (n_sources, n_targets)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# newer_pairwise ()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def newer_group(sources, target, missing='error'):
    """Return true if 'target' is out-of-date w.r.t. any file in 'sources'.

    In other words, if 'target' exists and is newer than every file in
    'sources', return false; otherwise return true.

    'missing' controls what happens when a source file does not exist;
    the default ("error") blows up with an OSError from inside 'stat()';
    "ignore" silently drops missing source files; "newer" treats any
    missing source as making 'target' out-of-date (handy in "dry-run"
    mode: commands whose inputs are missing can still be pretended-run).
    """
    # A nonexistent target is always out-of-date.
    if not os.path.exists(target):
        return 1

    from stat import ST_MTIME

    # Compare every source against the target; any newer source means
    # the target is out-of-date and we can return immediately.
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'ignore':
                # Drop missing sources from the dependency list.
                continue
            if missing == 'newer':
                # A missing source forces the target out-of-date.
                return 1
            # missing == 'error' (or any other value): fall through and
            # let os.stat() below raise OSError for the missing file.
        if os.stat(source)[ST_MTIME] > target_mtime:
            return 1
    return 0
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# newer_group ()
 | 
			
		||||
@@ -0,0 +1,243 @@
 | 
			
		||||
"""distutils.dir_util
 | 
			
		||||
 | 
			
		||||
Utility functions for manipulating directories and directory trees."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import errno
 | 
			
		||||
from .errors import DistutilsInternalError, DistutilsFileError
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
# Cache of directories already created by mkpath() -- in addition to
# cheapening redundant calls, it eliminates redundant
# "creating /foo/bar/baz" messages in dry-run mode.
# Keys are absolute paths; values are truthy markers.
_path_created = {}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
    Return the list of directories actually created.

    os.makedirs is not used because:

    a) It's new to Python 1.5.2, and
    b) it blows up if the directory already exists (in which case it should
       silently succeed).
    """

    global _path_created

    # Detect a common bug -- name is None
    if not isinstance(name, str):
        raise DistutilsInternalError(
            "mkpath: 'name' must be a string (got {!r})".format(name)
        )

    # XXX what's the better way to handle verbosity? print as we create
    # each directory in the path (the current behaviour), or only announce
    # the creation of the whole path? (quite easy to do the latter since
    # we're not using a recursive algorithm)

    name = os.path.normpath(name)
    created_dirs = []
    if os.path.isdir(name) or name == '':
        return created_dirs
    # The module-level cache suppresses both redundant syscalls and
    # duplicate "creating ..." log lines (notably in dry-run mode).
    if _path_created.get(os.path.abspath(name)):
        return created_dirs

    (head, tail) = os.path.split(name)
    tails = [tail]  # stack of lone dirs to create

    # Walk upward until we hit a directory that already exists,
    # accumulating the missing path components.
    while head and tail and not os.path.isdir(head):
        (head, tail) = os.path.split(head)
        tails.insert(0, tail)  # push next higher dir onto stack

    # now 'head' contains the deepest directory that already exists
    # (that is, the child of 'head' in 'name' is the highest directory
    # that does *not* exist)
    for d in tails:
        # print "head = %s, d = %s: " % (head, d),
        head = os.path.join(head, d)
        abs_head = os.path.abspath(head)

        if _path_created.get(abs_head):
            continue

        if verbose >= 1:
            log.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError as exc:
                # A concurrent mkdir by another process (EEXIST on an
                # actual directory) is not an error; anything else is.
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise DistutilsFileError(
                        "could not create '{}': {}".format(head, exc.args[-1])
                    )
            created_dirs.append(head)

        # Mark as created even in dry-run mode, so repeated dry-run calls
        # don't log the same directory twice.
        _path_created[abs_head] = 1
    return created_dirs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
    """Create the empty directories under 'base_dir' needed to hold 'files'.

    'base_dir' is just the name of a directory which doesn't necessarily
    exist yet; 'files' is a list of filenames to be interpreted relative
    to 'base_dir'.  'base_dir' + the directory portion of every file in
    'files' will be created if it doesn't already exist.  'mode',
    'verbose' and 'dry_run' flags are as for 'mkpath()'.
    """
    # Collect the distinct directories needed, then create them in
    # sorted order (parents sort before their children).
    needed = {os.path.join(base_dir, os.path.dirname(f)) for f in files}
    for directory in sorted(needed):
        mkpath(directory, mode, verbose=verbose, dry_run=dry_run)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def copy_tree(  # noqa: C901
    src,
    dst,
    preserve_mode=1,
    preserve_times=1,
    preserve_symlinks=0,
    update=0,
    verbose=1,
    dry_run=0,
):
    """Copy an entire directory tree 'src' to a new location 'dst'.

    Both 'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    # Imported lazily to avoid a circular import at module load time.
    from distutils.file_util import copy_file

    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
    try:
        names = os.listdir(src)
    except OSError as e:
        # In dry-run mode a missing/unreadable source tree is tolerated:
        # pretend it is empty so the "copy" can still be simulated.
        if dry_run:
            names = []
        else:
            raise DistutilsFileError(
                "error listing files in '{}': {}".format(src, e.strerror)
            )

    if not dry_run:
        mkpath(dst, verbose=verbose)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if n.startswith('.nfs'):
            # skip NFS rename files
            continue

        if preserve_symlinks and os.path.islink(src_name):
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            # Recorded even in dry-run mode (see docstring: the return
            # value is the full would-be output list).
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            # Recurse into subdirectories, propagating all flags.
            outputs.extend(
                copy_tree(
                    src_name,
                    dst_name,
                    preserve_mode,
                    preserve_times,
                    preserve_symlinks,
                    update,
                    verbose=verbose,
                    dry_run=dry_run,
                )
            )
        else:
            copy_file(
                src_name,
                dst_name,
                preserve_mode,
                preserve_times,
                update,
                verbose=verbose,
                dry_run=dry_run,
            )
            outputs.append(dst_name)

    return outputs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _build_cmdtuple(path, cmdtuples):
 | 
			
		||||
    """Helper for remove_tree()."""
 | 
			
		||||
    for f in os.listdir(path):
 | 
			
		||||
        real_f = os.path.join(path, f)
 | 
			
		||||
        if os.path.isdir(real_f) and not os.path.islink(real_f):
 | 
			
		||||
            _build_cmdtuple(real_f, cmdtuples)
 | 
			
		||||
        else:
 | 
			
		||||
            cmdtuples.append((os.remove, real_f))
 | 
			
		||||
    cmdtuples.append((os.rmdir, path))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def remove_tree(directory, verbose=1, dry_run=0):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if
    'verbose' is true).
    """
    global _path_created

    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return

    # Plan all deletions first (files before their directories), then
    # execute them one by one, ignoring individual failures.
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for func, arg in cmdtuples:
        try:
            func(arg)
            # Drop the removed path from mkpath()'s cache, if present.
            _path_created.pop(os.path.abspath(arg), None)
        except OSError as exc:
            log.warning("error removing %s: %s", directory, exc)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def ensure_relative(path):
    """Strip 'path' down to a relative path.

    Useful for making 'path' acceptable as the second argument to
    os.path.join().
    """
    drive, tail = os.path.splitdrive(path)
    if tail.startswith(os.sep):
        # Drop the leading separator, keeping any drive prefix.
        tail = drive + tail[1:]
    return tail
 | 
			
		||||
							
								
								
									
										1287
									
								
								teil20/lib/python3.11/site-packages/setuptools/_distutils/dist.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1287
									
								
								teil20/lib/python3.11/site-packages/setuptools/_distutils/dist.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							@@ -0,0 +1,127 @@
 | 
			
		||||
"""distutils.errors
 | 
			
		||||
 | 
			
		||||
Provides exceptions used by the Distutils modules.  Note that Distutils
 | 
			
		||||
modules may raise standard exceptions; in particular, SystemExit is
 | 
			
		||||
usually raised for errors that are obviously the end-user's fault
 | 
			
		||||
(eg. bad command-line arguments).
 | 
			
		||||
 | 
			
		||||
This module is safe to use in "from ... import *" mode; it only exports
 | 
			
		||||
symbols whose names start with "Distutils" and end with "Error"."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsError(Exception):
    """Root of the Distutils exception hierarchy."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsModuleError(DistutilsError):
    """A required module could not be loaded, or an expected class
    (notably a command module or command class) could not be found
    within some module."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsClassError(DistutilsError):
    """Some command class (or possibly a distribution class, should
    anyone feel a need to subclass Distribution) is found not to be
    holding up its end of the bargain, i.e. not implementing some part
    of the "command" interface."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsGetoptError(DistutilsError):
    """The option table handed to 'fancy_getopt()' is malformed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsArgError(DistutilsError):
    """Raised by fancy_getopt in response to getopt.error, i.e. an
    error in the command-line usage."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsFileError(DistutilsError):
    """Any filesystem problem: an expected file is missing, etc.
    Typically raised for conditions detected before an OSError
    would be raised."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsOptionError(DistutilsError):
    """Syntactic or semantic errors in command options: mutually
    conflicting or inconsistent options, badly-spelled values, and so
    on.  No distinction is made between option values originating in
    the setup script, on the command line, in config files, or
    elsewhere -- but when an error is *known* to originate in the
    setup script, DistutilsSetupError is raised instead."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsSetupError(DistutilsError):
    """An error that can be definitely blamed on the setup script,
    e.g. invalid keyword arguments passed to 'setup()'."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsPlatformError(DistutilsError):
    """We don't know how to do something on the current platform, even
    though we do know how on some other platform -- e.g. trying to
    compile C files on a platform no CCompiler subclass supports."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsExecError(DistutilsError):
    """A problem running an external program, such as the C compiler
    invoked to compile C files."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsInternalError(DistutilsError):
    """An internal inconsistency or impossibility -- obviously, this
    should never be seen if the code is working!"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsTemplateError(DistutilsError):
    """Syntax error in a file-list template."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DistutilsByteCompileError(DistutilsError):
    """Raised when byte compilation fails."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Exception classes used by the CCompiler implementation classes
 | 
			
		||||
class CCompilerError(Exception):
    """A compile or link operation failed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PreprocessError(CCompilerError):
    """Preprocessing of one or more C/C++ files failed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CompileError(CCompilerError):
    """Compilation of one or more C/C++ source files failed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LibError(CCompilerError):
    """Creating a static library from one or more C/C++ object files
    failed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LinkError(CCompilerError):
    """Linking one or more C/C++ object files into an executable or a
    shared library failed."""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class UnknownFileError(CCompilerError):
    """An attempt was made to process a file of unknown type."""
 | 
			
		||||
@@ -0,0 +1,248 @@
 | 
			
		||||
"""distutils.extension
 | 
			
		||||
 | 
			
		||||
Provides the Extension class, used to describe C/C++ extension
 | 
			
		||||
modules in setup scripts."""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
# This class is really only used by the "build_ext" command, so it might
 | 
			
		||||
# make sense to put it in distutils.command.build_ext.  However, that
 | 
			
		||||
# module is already big enough, and I want to make this class a bit more
 | 
			
		||||
# complex to simplify some common cases ("foo" module in "foo.c") and do
 | 
			
		||||
# better error-checking ("foo.c" actually exists).
 | 
			
		||||
#
 | 
			
		||||
# Also, putting this in build_ext.py means every setup script would have to
 | 
			
		||||
# import that large-ish module (indirectly, through distutils.core) in
 | 
			
		||||
# order to do anything.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Extension:
    """Describe a C/C++ extension module: everything the "build_ext"
    command needs to build it (hopefully in a portable way, though
    hooks exist to be as unportable as required).

    Instance attributes:
      name : string
        the full dotted Python name of the extension, including any
        packages -- i.e. *not* a filename or pathname
      sources : [string]
        source filenames, relative to the distribution root (where the
        setup script lives), in Unix (slash-separated) form for
        portability; may be C, C++, SWIG (.i), platform-specific
        resource files, or whatever else "build_ext" recognizes
      include_dirs : [string]
        directories to search for C/C++ header files (Unix form)
      define_macros : [(name : string, value : string|None)]
        macros to define; a 'value' of None defines the macro without a
        particular value ("#define FOO" / -DFOO)
      undef_macros : [string]
        macros to undefine explicitly
      library_dirs : [string]
        directories to search for C/C++ libraries at link time
      libraries : [string]
        library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        directories to search for C/C++ libraries at run time (for
        shared extensions, when the extension is loaded)
      extra_objects : [string]
        extra files to link with (object files not implied by
        'sources', explicitly specified static libraries, binary
        resource files, etc.)
      extra_compile_args : [string]
        extra platform- and compiler-specific compilation information;
        typically a list of command-line arguments
      extra_link_args : [string]
        extra platform- and compiler-specific link information,
        interpreted like 'extra_compile_args'
      export_symbols : [string]
        symbols to export from a shared extension; not used on all
        platforms, and not generally needed for Python extensions,
        which typically export exactly one symbol
      swig_opts : [string]
        extra options passed to SWIG for .i sources
      depends : [string]
        files that the extension depends on
      language : string
        extension language (i.e. "c", "c++", "objc"); detected from the
        source extensions if not provided
      optional : boolean
        if true, a build failure in the extension does not abort the
        build process -- the failing extension simply isn't installed
    """

    # When adding arguments to this constructor, be sure to update
    # setup_keywords in core.py.
    def __init__(
        self,
        name,
        sources,
        include_dirs=None,
        define_macros=None,
        undef_macros=None,
        library_dirs=None,
        libraries=None,
        runtime_library_dirs=None,
        extra_objects=None,
        extra_compile_args=None,
        extra_link_args=None,
        export_symbols=None,
        swig_opts=None,
        depends=None,
        language=None,
        optional=None,
        **kw  # To catch unknown keywords
    ):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)):
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources
        # Every optional list-valued argument defaults to a fresh list.
        for attr, value in (
            ('include_dirs', include_dirs),
            ('define_macros', define_macros),
            ('undef_macros', undef_macros),
            ('library_dirs', library_dirs),
            ('libraries', libraries),
            ('runtime_library_dirs', runtime_library_dirs),
            ('extra_objects', extra_objects),
            ('extra_compile_args', extra_compile_args),
            ('extra_link_args', extra_link_args),
            ('export_symbols', export_symbols),
            ('swig_opts', swig_opts),
            ('depends', depends),
        ):
            setattr(self, attr, value or [])
        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if kw:
            joined = ', '.join(sorted(repr(option) for option in kw))
            warnings.warn("Unknown Extension options: %s" % joined)

    def __repr__(self):
        cls = self.__class__
        return '<{}.{}({!r}) at {:#x}>'.format(
            cls.__module__, cls.__qualname__, self.name, id(self)
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def read_setup_file(filename):  # noqa: C901
    """Read a makesetup-style "Setup" file and return Extension instances.

    'filename' names a file in the format used by Python's old makesetup
    script: optional "VAR = VALUE" assignments, followed by one module
    per line of the form
        <module> [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    """
    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(
        filename,
        strip_comments=1,
        skip_blanks=1,
        join_lines=1,
        lstrip_ws=1,
        rstrip_ws=1,
    )
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:  # eof
                break
            if _variable_rx.match(line):  # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # The previous switch (-rpath/-Xlinker/-Xcompiler/-u)
                    # consumes this word as its argument.
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:  # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:  # "-DFOO=blah"
                        # Take everything after the '=' as the macro value.
                        # (The historical 'value[equals + 2:]' slice was an
                        # off-by-one that silently dropped the value's first
                        # character.)
                        ext.define_macros.append((value[0:equals], value[equals + 1 :]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":  # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        # bare "-u": the symbol name follows as the next word
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
 | 
			
		||||
@@ -0,0 +1,470 @@
 | 
			
		||||
"""distutils.fancy_getopt
 | 
			
		||||
 | 
			
		||||
Wrapper around the standard getopt module that provides the following
 | 
			
		||||
additional features:
 | 
			
		||||
  * short and long options are tied together
 | 
			
		||||
  * options have help strings, so fancy_getopt could potentially
 | 
			
		||||
    create a complete usage summary
 | 
			
		||||
  * options set attributes of a passed-in object
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import string
 | 
			
		||||
import re
 | 
			
		||||
import getopt
 | 
			
		||||
from .errors import DistutilsGetoptError, DistutilsArgError
 | 
			
		||||
 | 
			
		||||
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
# Note: must start with a letter; digits and hyphens allowed afterwards.
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object): maps '-' to '_'.
longopt_xlate = str.maketrans('-', '_')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        """Rebuild 'option_index' (long option name -> option tuple)
        from the current 'option_table'."""
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        """Replace the option table wholesale and re-index it."""
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        """Append a single option to the table.

        Raises DistutilsGetoptError if 'long_option' is already defined.
        """
        if long_option in self.option_index:
            raise DistutilsGetoptError(
                "option conflict: already an option '%s'" % long_option
            )
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def get_attr_name(self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores."""
        return long_option.translate(longopt_xlate)

    def _check_alias_dict(self, aliases, what):
        """Validate that every key and value of 'aliases' names an option
        defined in the option table; 'what' is the noun used in error
        messages ("alias" or "negative alias")."""
        assert isinstance(aliases, dict)
        for (alias, opt) in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError(
                    ("invalid %s '%s': " "option '%s' not defined")
                    % (what, alias, alias)
                )
            if opt not in self.option_index:
                raise DistutilsGetoptError(
                    ("invalid %s '%s': " "aliased option '%s' not defined")
                    % (what, alias, opt)
                )

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):  # noqa: C901
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                long, short, help = option
                repeat = 0
            elif len(option) == 4:
                long, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: {!r}".format(option))

            # Type- and value-check the option names
            if not isinstance(long, str) or len(long) < 2:
                raise DistutilsGetoptError(
                    ("invalid long option '%s': " "must be a string of length >= 2")
                    % long
                )

            if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                raise DistutilsGetoptError(
                    "invalid short option '%s': "
                    "must a single character or None" % short
                )

            self.repeat[long] = repeat
            self.long_opts.append(long)

            if long[-1] == '=':  # option takes an argument?
                if short:
                    short = short + ':'
                long = long[0:-1]
                self.takes_arg[long] = 1
            else:
                # Is option is a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise DistutilsGetoptError(
                            "invalid negative alias '%s': "
                            "aliased option '%s' takes a value" % (long, alias_to)
                        )

                    self.long_opts[-1] = long  # XXX redundant?!
                self.takes_arg[long] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long)
            if alias_to is not None:
                if self.takes_arg[long] != self.takes_arg[alias_to]:
                    raise DistutilsGetoptError(
                        "invalid alias '%s': inconsistent with "
                        "aliased option '%s' (one of them takes a value, "
                        "the other doesn't" % (long, alias_to)
                    )

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long):
                raise DistutilsGetoptError(
                    "invalid long option name '%s' "
                    "(must be letters, numbers, hyphens only" % long
                )

            self.attr_name[long] = self.get_attr_name(long)
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long

    def getopt(self, args=None, object=None):  # noqa: C901
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.

        Raises DistutilsArgError if the standard getopt module rejects
        the command line.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        short_opts = ' '.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise DistutilsArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':  # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:  # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            # NOTE(review): 'self.repeat' is keyed by the *long option name*
            # while 'attr' is its hyphen->underscore translation; the lookup
            # below only matches for options without hyphens (like 'verbose').
            # Preserved as-is -- confirm before relying on repeat counting
            # for hyphenated option names.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        # for opts
        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        else:
            return self.option_order

    def generate_help(self, header=None):  # noqa: C901
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            ell = len(long)
            if long[-1] == '=':
                ell = ell - 1
            if short is not None:
                ell = ell + 5  # " (-x)" where short == 'x'
            if ell > max_opt:
                max_opt = ell

        opt_width = max_opt + 2 + 2 + 2  # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "{} (-{})".format(long, short)
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, opt_names, text[0]))
                else:
                    # BUG FIX: the '*' width specifier consumes an argument,
                    # so the width (max_opt) must be supplied alongside the
                    # string; the original "  --%-*s" % opt_names raised
                    # TypeError for any short-named option with no help text.
                    lines.append("  --%-*s" % (max_opt, opt_names))

            for ell in text[1:]:
                lines.append(big_indent + ell)
        return lines

    def print_help(self, header=None, file=None):
        """Write the generated help text to 'file' (default: sys.stdout),
        one line per entry."""
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def fancy_getopt(options, negative_opt, object, args):
    """Convenience wrapper around FancyGetopt: build a parser for
    'options', register 'negative_opt' as its negative aliases, and parse
    'args' into attributes of 'object' (see FancyGetopt.getopt for the
    return value).
    """
    getopt_parser = FancyGetopt(options)
    getopt_parser.set_negative_aliases(negative_opt)
    return getopt_parser.getopt(args, object)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Translation table mapping every whitespace character to a plain space.
WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}


def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize all whitespace to single-space characters, then split the
    # text into alternating word / separator chunks.  Runs of '-' count as
    # separators too, so hyphenated words may break after a hyphen; empty
    # strings produced by the split (e.g. from ' - ') are discarded.
    normalized = text.expandtabs().translate(WS_TRANS)
    pieces = [chunk for chunk in re.split(r'( +|-+)', normalized) if chunk]

    lines = []
    while pieces:
        line_parts = []  # chunks that make up the current output line
        line_len = 0  # running length of the current output line

        while pieces:
            piece_len = len(pieces[0])
            if line_len + piece_len > width:
                # Current line is full; a trailing all-space chunk would
                # leave ugly whitespace at the line end, so drop it.
                if line_parts and line_parts[-1][0] == ' ':
                    del line_parts[-1]
                break
            line_parts.append(pieces.pop(0))
            line_len += piece_len

        if pieces:
            # If nothing fit, the next chunk alone is wider than 'width':
            # hard-split it at the line boundary.
            if line_len == 0:
                line_parts.append(pieces[0][:width])
                pieces[0] = pieces[0][width:]

            # An all-whitespace chunk at a line break is discarded (the
            # re.split above guarantees a chunk with any whitespace is
            # entirely whitespace).
            if pieces[0][0] == ' ':
                del pieces[0]

        lines.append(''.join(line_parts))

    return lines
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def translate_longopt(opt):
    """Return 'opt' rewritten as a legal Python identifier: every "-"
    becomes "_" (via the module-level 'longopt_xlate' table).
    """
    identifier = opt.translate(longopt_xlate)
    return identifier
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=()):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None.

        The default was changed from a mutable list literal to an empty
        tuple: a shared mutable default is a classic Python pitfall, and
        the argument is only iterated, so any iterable of attribute
        names is accepted exactly as before.
        """
        for opt in options:
            setattr(self, opt, None)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
    # Ad-hoc smoke test: exercise wrap_text() on a fixed paragraph at
    # several widths and print the results for visual inspection.
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for w in (10, 20, 30, 40):
        print("width: %d" % w)
        print("\n".join(wrap_text(text, w)))
        print()
 | 
			
		||||
@@ -0,0 +1,249 @@
 | 
			
		||||
"""distutils.file_util
 | 
			
		||||
 | 
			
		||||
Utility functions for operating on single files.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from .errors import DistutilsFileError
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
# for generating verbose output in 'copy_file()'
# Maps the 'link' argument of copy_file() to the verb logged for that action;
# None (plain copy) is a valid key.
_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _copy_file_contents(src, dst, buffer_size=16 * 1024):  # noqa: C901
 | 
			
		||||
    """Copy the file 'src' to 'dst'; both must be filenames.  Any error
 | 
			
		||||
    opening either file, reading from 'src', or writing to 'dst', raises
 | 
			
		||||
    DistutilsFileError.  Data is read/written in chunks of 'buffer_size'
 | 
			
		||||
    bytes (default 16k).  No attempt is made to handle anything apart from
 | 
			
		||||
    regular files.
 | 
			
		||||
    """
 | 
			
		||||
    # Stolen from shutil module in the standard library, but with
 | 
			
		||||
    # custom error-handling added.
 | 
			
		||||
    fsrc = None
 | 
			
		||||
    fdst = None
 | 
			
		||||
    try:
 | 
			
		||||
        try:
 | 
			
		||||
            fsrc = open(src, 'rb')
 | 
			
		||||
        except OSError as e:
 | 
			
		||||
            raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror))
 | 
			
		||||
 | 
			
		||||
        if os.path.exists(dst):
 | 
			
		||||
            try:
 | 
			
		||||
                os.unlink(dst)
 | 
			
		||||
            except OSError as e:
 | 
			
		||||
                raise DistutilsFileError(
 | 
			
		||||
                    "could not delete '{}': {}".format(dst, e.strerror)
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
        try:
 | 
			
		||||
            fdst = open(dst, 'wb')
 | 
			
		||||
        except OSError as e:
 | 
			
		||||
            raise DistutilsFileError(
 | 
			
		||||
                "could not create '{}': {}".format(dst, e.strerror)
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        while True:
 | 
			
		||||
            try:
 | 
			
		||||
                buf = fsrc.read(buffer_size)
 | 
			
		||||
            except OSError as e:
 | 
			
		||||
                raise DistutilsFileError(
 | 
			
		||||
                    "could not read from '{}': {}".format(src, e.strerror)
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
            if not buf:
 | 
			
		||||
                break
 | 
			
		||||
 | 
			
		||||
            try:
 | 
			
		||||
                fdst.write(buf)
 | 
			
		||||
            except OSError as e:
 | 
			
		||||
                raise DistutilsFileError(
 | 
			
		||||
                    "could not write to '{}': {}".format(dst, e.strerror)
 | 
			
		||||
                )
 | 
			
		||||
    finally:
 | 
			
		||||
        if fdst:
 | 
			
		||||
            fdst.close()
 | 
			
		||||
        if fsrc:
 | 
			
		||||
            fsrc.close()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def copy_file(  # noqa: C901
    src,
    dst,
    preserve_mode=1,
    preserve_times=1,
    update=0,
    link=None,
    verbose=1,
    dry_run=0,
):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available. If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    # Imported lazily to avoid cycles at module import time.
    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
            "can't copy '%s': doesn't exist or not a regular file" % src
        )

    # Resolve 'dst' to a full file path; 'dir' is kept for log messages.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, 0)

    # Validate 'link' by mapping it to the verb used in log output.
    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        # When the name is unchanged, logging the target directory is
        # less noisy than repeating the full destination path.
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        # Report what *would* have happened, without touching the disk.
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
                return (dst, 1)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                #  even under Unix, see issue #8876).
                pass
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, 1)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# XXX I suspect this is Unix-specific -- need porting help!
 | 
			
		||||
def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'.  Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???
    """
    import errno
    from os.path import exists, isfile, isdir, basename, dirname

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    # In dry-run mode report the would-be destination without touching disk.
    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    # Moving into a directory keeps the original basename.
    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError(
            "can't move '{}': destination '{}' already exists".format(src, dst)
        )

    if not isdir(dirname(dst)):
        raise DistutilsFileError(
            "can't move '{}': destination '{}' not a valid path".format(src, dst)
        )

    # Try the cheap atomic rename first; fall back to copy+delete only for
    # cross-device moves (EXDEV).
    needs_copy = False
    try:
        os.rename(src, dst)
    except OSError as exc:
        (code, reason) = exc.args
        if code != errno.EXDEV:
            raise DistutilsFileError(
                "couldn't move '{}' to '{}': {}".format(src, dst, reason)
            )
        needs_copy = True

    if needs_copy:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except OSError as exc:
            (code, reason) = exc.args
            # Best-effort rollback of the copy before reporting failure.
            try:
                os.unlink(dst)
            except OSError:
                pass
            raise DistutilsFileError(
                "couldn't move '%s' to '%s' by copy/delete: "
                "delete '%s' failed: %s" % (src, dst, src, reason)
            )
    return dst
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.

    Each item of 'contents' is written followed by a newline.  The file is
    truncated if it already exists.
    """
    # A 'with' block replaces the original try/finally and guarantees the
    # file is closed on every exit path, including mid-write errors.
    with open(filename, "w") as f:
        for line in contents:
            f.write(line + "\n")
 | 
			
		||||
@@ -0,0 +1,371 @@
 | 
			
		||||
"""distutils.filelist
 | 
			
		||||
 | 
			
		||||
Provides the FileList class, used for poking about the filesystem
 | 
			
		||||
and building lists of files.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import fnmatch
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
from .util import convert_path
 | 
			
		||||
from .errors import DistutilsTemplateError, DistutilsInternalError
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class FileList:
    """A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.

    Instance attributes:
      dir
        directory from which files will be taken -- only used if
        'allfiles' not supplied to constructor
      files
        list of filenames currently being built/filtered/manipulated
      allfiles
        complete list of files under consideration (ie. without any
        filtering applied)
    """

    def __init__(self, warn=None, debug_print=None):
        # ignore argument to FileList, but keep them for backwards
        # compatibility
        self.allfiles = None
        self.files = []

    def set_allfiles(self, allfiles):
        """Use a precomputed list of candidate files instead of scanning."""
        self.allfiles = allfiles

    def findall(self, dir=os.curdir):
        """Populate 'allfiles' by scanning 'dir' with the module-level findall()."""
        self.allfiles = findall(dir)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG

        if DEBUG:
            print(msg)

    # Collection methods

    def append(self, item):
        """Append a single filename to the working list."""
        self.files.append(item)

    def extend(self, items):
        """Append a sequence of filenames to the working list."""
        self.files.extend(items)

    def sort(self):
        # Not a strict lexical sort!
        # Sorts on (dirname, basename) pairs so entries group by directory.
        sortable_files = sorted(map(os.path.split, self.files))
        self.files = []
        for sort_tuple in sortable_files:
            self.files.append(os.path.join(*sort_tuple))

    # Other miscellaneous utility methods

    def remove_duplicates(self):
        """Drop adjacent duplicate filenames in place.

        # Assumes list has been sorted!
        """
        # Iterate backwards so deletions don't shift unvisited indices.
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]

    # "File template" methods

    def _parse_template_line(self, line):
        """Split a manifest-template line into (action, patterns, dir, dir_pattern).

        Exactly one of patterns / (dir and patterns) / dir_pattern is set,
        depending on the action; the rest stay None.  Raises
        DistutilsTemplateError for an unknown action or wrong word count.
        """
        words = line.split()
        action = words[0]

        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude', 'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistutilsTemplateError(
                    "'%s' expects <pattern1> <pattern2> ..." % action
                )
            patterns = [convert_path(w) for w in words[1:]]
        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistutilsTemplateError(
                    "'%s' expects <dir> <pattern1> <pattern2> ..." % action
                )
            dir = convert_path(words[1])
            patterns = [convert_path(w) for w in words[2:]]
        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistutilsTemplateError(
                    "'%s' expects a single <dir_pattern>" % action
                )
            dir_pattern = convert_path(words[1])
        else:
            raise DistutilsTemplateError("unknown action '%s'" % action)

        return (action, patterns, dir, dir_pattern)

    def process_template_line(self, line):  # noqa: C901
        """Apply one manifest-template line to the working file list.

        Dispatches on the action word; logs a warning whenever a pattern
        matches nothing.
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warning("warning: no files found matching '%s'", pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warning(
                        (
                            "warning: no previously-included files "
                            "found matching '%s'"
                        ),
                        pattern,
                    )

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warning(
                        (
                            "warning: no files found matching '%s' "
                            "anywhere in distribution"
                        ),
                        pattern,
                    )

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warning(
                        (
                            "warning: no previously-included files matching "
                            "'%s' found anywhere in distribution"
                        ),
                        pattern,
                    )

        elif action == 'recursive-include':
            self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    msg = (
                        "warning: no files found matching '%s' " "under directory '%s'"
                    )
                    log.warning(msg, pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warning(
                        (
                            "warning: no previously-included files matching "
                            "'%s' found under directory '%s'"
                        ),
                        pattern,
                        dir,
                    )

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warning("warning: no directories found matching '%s'", dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warning(
                    ("no previously-included directories found " "matching '%s'"),
                    dir_pattern,
                )
        else:
            # _parse_template_line already rejected unknown actions.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action
            )

    # Filtering/selection methods

    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?'  match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found, False otherwise.
        """
        # XXX docstring lying about what the special chars are?
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = True
        return files_found

    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return True if files are found, False otherwise.
        """
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
        # Iterate backwards so in-place deletion doesn't skip entries.
        for i in range(len(self.files) - 1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = True
        return files_found
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Utility functions
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    # Walk the tree (following symlinks); _UniqueDirs prunes directories
    # already visited so symlink cycles cannot recurse forever.
    walker = _UniqueDirs.filter(os.walk(path, followlinks=True))
    joined = (
        os.path.join(base, name)
        for base, _dirs, names in walker
        for name in names
    )
    # Keep only entries that are regular files (lazily, as a filter object).
    return filter(os.path.isfile, joined)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _UniqueDirs(set):
 | 
			
		||||
    """
 | 
			
		||||
    Exclude previously-seen dirs from walk results,
 | 
			
		||||
    avoiding infinite recursion.
 | 
			
		||||
    Ref https://bugs.python.org/issue44497.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    def __call__(self, walk_item):
 | 
			
		||||
        """
 | 
			
		||||
        Given an item from an os.walk result, determine
 | 
			
		||||
        if the item represents a unique dir for this instance
 | 
			
		||||
        and if not, prevent further traversal.
 | 
			
		||||
        """
 | 
			
		||||
        base, dirs, files = walk_item
 | 
			
		||||
        stat = os.stat(base)
 | 
			
		||||
        candidate = stat.st_dev, stat.st_ino
 | 
			
		||||
        found = candidate in self
 | 
			
		||||
        if found:
 | 
			
		||||
            del dirs[:]
 | 
			
		||||
        self.add(candidate)
 | 
			
		||||
        return not found
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def filter(cls, items):
 | 
			
		||||
        return filter(cls(), items)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        # For the current directory, strip the leading './' by expressing
        # each result relative to 'dir'.
        found = map(functools.partial(os.path.relpath, start=dir), found)
    return list(found)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    translated = fnmatch.translate(pattern)

    # fnmatch turns '?' and '*' into '.' and '.*', which would let them
    # match the path separator.  Rewrite every non-escaped '.' in the
    # produced regex so it matches anything EXCEPT os.sep instead.
    sep = os.sep
    if os.sep == '\\':
        # we're using a regex to manipulate a regex, so we need
        # to escape the backslash twice
        sep = r'\\\\'
    replacement = r'\1[^%s]' % sep
    # Match a '.' preceded by an even number of backslashes (i.e. a '.'
    # that is not itself escaped) and substitute the character class.
    return re.sub(r'((?<!\\)(\\\\)*)\.', replacement, translated)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # ditch start and end characters
    # Translate a trivial glob to discover what constant wrapping
    # (e.g. '(?s:' ... ')\Z') the current fnmatch implementation emits.
    start, _, end = glob_to_re('_').partition('_')

    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        pattern_re = ''

    if prefix is not None:
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        # Strip the wrapper from the prefix regex so it can be embedded.
        prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            # backslash must be escaped once for use inside a regex
            sep = r'\\'
        # Strip the wrapper from the pattern too, then rebuild one regex:
        # wrapper + anchored prefix + separator + anything + pattern + wrapper.
        pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)]
        pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end)
    else:  # no prefix -- respect anchor flag
        if anchor:
            # Insert \A just inside the wrapper to anchor at the start.
            pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :])

    return re.compile(pattern_re)
 | 
			
		||||
@@ -0,0 +1,57 @@
 | 
			
		||||
"""
 | 
			
		||||
A simple log mechanism styled after PEP 282.
 | 
			
		||||
 | 
			
		||||
Retained for compatibility and should not be used.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
from ._log import log as _global_log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
DEBUG = logging.DEBUG
 | 
			
		||||
INFO = logging.INFO
 | 
			
		||||
WARN = logging.WARN
 | 
			
		||||
ERROR = logging.ERROR
 | 
			
		||||
FATAL = logging.FATAL
 | 
			
		||||
 | 
			
		||||
log = _global_log.log
 | 
			
		||||
debug = _global_log.debug
 | 
			
		||||
info = _global_log.info
 | 
			
		||||
warn = _global_log.warning
 | 
			
		||||
error = _global_log.error
 | 
			
		||||
fatal = _global_log.fatal
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def set_threshold(level):
    """Set the shared logger's level to 'level'; return the previous level."""
    previous = _global_log.level
    _global_log.setLevel(level)
    return previous
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def set_verbosity(v):
    """Map a distutils verbosity count to a logging threshold."""
    # Branches are mutually exclusive; order does not affect the outcome.
    if v >= 2:
        set_threshold(logging.DEBUG)
    elif v == 1:
        set_threshold(logging.INFO)
    elif v <= 0:
        set_threshold(logging.WARN)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Log(logging.Logger):
    """distutils.log.Log is deprecated, please use an alternative from `logging`."""

    def __init__(self, threshold=WARN):
        warnings.warn(Log.__doc__)  # avoid DeprecationWarning to ensure warn is shown
        super().__init__(__name__, level=threshold)

    @property
    def threshold(self):
        # Legacy alias: the old distutils "threshold" is the logging level.
        return self.level

    @threshold.setter
    def threshold(self, level):
        self.setLevel(level)

    # Legacy method name retained for backward compatibility.
    warn = logging.Logger.warning
 | 
			
		||||
@@ -0,0 +1,832 @@
 | 
			
		||||
"""distutils.msvc9compiler
 | 
			
		||||
 | 
			
		||||
Contains MSVCCompiler, an implementation of the abstract CCompiler class
 | 
			
		||||
for the Microsoft Visual Studio 2008.
 | 
			
		||||
 | 
			
		||||
The module is compatible with VS 2005 and VS 2008. You can find legacy support
 | 
			
		||||
for older versions of VS in distutils.msvccompiler.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# Written by Perry Stoll
 | 
			
		||||
# hacked by Robin Becker and Thomas Heller to do a better job of
 | 
			
		||||
#   finding DevStudio (through the registry)
 | 
			
		||||
# ported to VS2005 and VS 2008 by Christian Heimes
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
import re
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
    LibError,
 | 
			
		||||
    LinkError,
 | 
			
		||||
)
 | 
			
		||||
from .ccompiler import CCompiler, gen_lib_options
 | 
			
		||||
from ._log import log
 | 
			
		||||
from .util import get_platform
 | 
			
		||||
 | 
			
		||||
import winreg
 | 
			
		||||
 | 
			
		||||
warnings.warn(
 | 
			
		||||
    "msvc9compiler is deprecated and slated to be removed "
 | 
			
		||||
    "in the future. Please discontinue use or file an issue "
 | 
			
		||||
    "with pypa/distutils describing your use case.",
 | 
			
		||||
    DeprecationWarning,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
RegOpenKeyEx = winreg.OpenKeyEx
 | 
			
		||||
RegEnumKey = winreg.EnumKey
 | 
			
		||||
RegEnumValue = winreg.EnumValue
 | 
			
		||||
RegError = winreg.error
 | 
			
		||||
 | 
			
		||||
HKEYS = (
 | 
			
		||||
    winreg.HKEY_USERS,
 | 
			
		||||
    winreg.HKEY_CURRENT_USER,
 | 
			
		||||
    winreg.HKEY_LOCAL_MACHINE,
 | 
			
		||||
    winreg.HKEY_CLASSES_ROOT,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32
 | 
			
		||||
if NATIVE_WIN64:
 | 
			
		||||
    # Visual C++ is a 32-bit application, so we need to look in
 | 
			
		||||
    # the corresponding registry branch, if we're running a
 | 
			
		||||
    # 64-bit Python on Win64
 | 
			
		||||
    VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
 | 
			
		||||
    WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
 | 
			
		||||
    NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
 | 
			
		||||
else:
 | 
			
		||||
    VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
 | 
			
		||||
    WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
 | 
			
		||||
    NET_BASE = r"Software\Microsoft\.NETFramework"
 | 
			
		||||
 | 
			
		||||
# A map keyed by get_platform() return values to values accepted by
 | 
			
		||||
# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is
 | 
			
		||||
# the param to cross-compile on x86 targeting amd64.)
 | 
			
		||||
PLAT_TO_VCVARS = {
 | 
			
		||||
    'win32': 'x86',
 | 
			
		||||
    'win-amd64': 'amd64',
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Reg:
    """Helper class to read values from the registry"""

    def get_value(cls, path, key):
        """Return the value named 'key' under 'path', searching all HKEYS roots.

        Raises KeyError if no root contains the value.
        """
        for base in HKEYS:
            d = cls.read_values(base, path)
            if d and key in d:
                return d[key]
        raise KeyError(key)

    # Pre-decorator classmethod spelling, preserved from the original source.
    get_value = classmethod(get_value)

    def read_keys(cls, base, key):
        """Return list of registry keys."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            # Key does not exist (or is unreadable) under this root.
            return None
        L = []
        i = 0
        # Enumerate subkeys until RegEnumKey runs off the end.
        while True:
            try:
                k = RegEnumKey(handle, i)
            except RegError:
                break
            L.append(k)
            i += 1
        return L

    read_keys = classmethod(read_keys)

    def read_values(cls, base, key):
        """Return dict of registry keys and values.

        All names are converted to lowercase.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        d = {}
        i = 0
        # Enumerate (name, value, type) triples until exhausted.
        while True:
            try:
                name, value, type = RegEnumValue(handle, i)
            except RegError:
                break
            name = name.lower()
            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
            i += 1
        return d

    read_values = classmethod(read_values)

    def convert_mbcs(s):
        # Decode byte strings with the Windows "mbcs" codec; str input (no
        # .decode attribute) and undecodable input pass through unchanged.
        dec = getattr(s, "decode", None)
        if dec is not None:
            try:
                s = dec("mbcs")
            except UnicodeError:
                pass
        return s

    convert_mbcs = staticmethod(convert_mbcs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class MacroExpander:
    """Expand $(Macro)-style placeholders using Visual Studio registry data."""

    def __init__(self, version):
        # Maps "$(Name)" placeholder strings to their registry-derived values.
        self.macros = {}
        self.vsbase = VS_BASE % version
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        """Record "$(macro)" -> value read from the registry at path/key."""
        self.macros["$(%s)" % macro] = Reg.get_value(path, key)

    def load_macros(self, version):
        """Populate the macro table for the given Visual Studio version."""
        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
        self.set_macro("FrameworkDir", NET_BASE, "installroot")
        try:
            if version >= 8.0:
                self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0")
            else:
                # Force the same failure path as a missing registry value.
                raise KeyError("sdkinstallrootv2.0")
        except KeyError:
            raise DistutilsPlatformError(
                """Python was built with Visual Studio 2008;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
            )

        if version >= 9.0:
            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
        else:
            # Older VS: look the framework version up under each registry root.
            p = r"Software\Microsoft\NET Framework Setup\Product"
            for base in HKEYS:
                try:
                    h = RegOpenKeyEx(base, p)
                except RegError:
                    continue
                key = RegEnumKey(h, 0)
                d = Reg.get_value(base, r"{}\{}".format(p, key))
                self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return 's' with every known "$(Macro)" placeholder replaced."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    marker = "MSC v."
    start = sys.version.find(marker)
    if start == -1:
        # No MSC marker at all: assume the ancient MSVC 6 toolchain.
        return 6
    start += len(marker)
    msc_ver = sys.version[start:].split(" ", 1)[0]
    # The MSC version is encoded as e.g. "1500"; the major part maps to the
    # Visual Studio version by subtracting 6 (1500 -> VS 9).
    major = int(msc_ver[:-2]) - 6
    if major >= 13:
        # v13 was skipped and should be v14
        major += 1
    minor = int(msc_ver[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # else we don't know what version of the compiler this is
    return None
		||||
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
    seen = set()
    unique = []
    for path in paths:
        normalized = os.path.normpath(path)
        # Membership test on a set instead of the result list keeps the
        # scan linear; first occurrence wins, preserving input order.
        if normalized not in seen:
            seen.add(normalized)
            unique.append(normalized)
    return unique
		||||
def removeDuplicates(variable):
    """Remove duplicate values of an environment variable."""
    # dict.fromkeys preserves first-seen order (guaranteed since Python 3.7),
    # so this deduplicates the os.pathsep-separated entries in place.
    entries = dict.fromkeys(variable.split(os.pathsep))
    return os.pathsep.join(entries)
		||||
def find_vcvarsall(version):
    """Find the vcvarsall.bat file

    At first it tries to find the productdir of VS 2008 in the registry. If
    that fails it falls back to the VS90COMNTOOLS env var.
    """
    vsbase = VS_BASE % version
    try:
        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir")
    except KeyError:
        log.debug("Unable to find productdir in registry")
        productdir = None

    if not (productdir and os.path.isdir(productdir)):
        # Registry lookup failed or points nowhere: fall back to the
        # VS<nn>0COMNTOOLS environment variable, which sits two directory
        # levels above the VC product directory.
        toolskey = "VS%0.f0COMNTOOLS" % version
        toolsdir = os.environ.get(toolskey, None)

        if toolsdir and os.path.isdir(toolsdir):
            productdir = os.path.abspath(
                os.path.join(toolsdir, os.pardir, os.pardir, "VC")
            )
            if not os.path.isdir(productdir):
                log.debug("%s is not a valid directory" % productdir)
                return None
        else:
            log.debug("Env var %s is not set or invalid" % toolskey)
    if not productdir:
        log.debug("No productdir found")
        return None
    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
    if os.path.isfile(vcvarsall):
        return vcvarsall
    log.debug("Unable to find vcvarsall.bat")
    return None
		||||
def query_vcvarsall(version, arch="x86"):
    """Launch vcvarsall.bat and read the settings from its environment"""
    vcvarsall = find_vcvarsall(version)
    # Only these environment variables are extracted from the batch file's
    # environment; everything else "set" prints is ignored.
    interesting = {"include", "lib", "libpath", "path"}
    result = {}

    if vcvarsall is None:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
    # Run vcvarsall.bat and then "set" in the same shell, so the printed
    # environment reflects the changes vcvarsall made.
    popen = subprocess.Popen(
        '"{}" {} & set'.format(vcvarsall, arch),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    try:
        stdout, stderr = popen.communicate()
        if popen.wait() != 0:
            # Surface the batch file's own error text; "mbcs" is the
            # Windows ANSI codepage the console output is encoded in.
            raise DistutilsPlatformError(stderr.decode("mbcs"))

        stdout = stdout.decode("mbcs")
        # Parse "KEY=value" lines emitted by "set".
        for line in stdout.split("\n"):
            line = Reg.convert_mbcs(line)
            if '=' not in line:
                continue
            line = line.strip()
            key, value = line.split('=', 1)
            key = key.lower()
            if key in interesting:
                # Drop a trailing path separator, then collapse duplicate
                # entries that vcvarsall tends to accumulate.
                if value.endswith(os.pathsep):
                    value = value[:-1]
                result[key] = removeDuplicates(value)

    finally:
        # communicate() leaves the pipes open; close them explicitly.
        popen.stdout.close()
        popen.stderr.close()

    # All of the interesting variables must have been found, otherwise the
    # environment is unusable for building.
    if len(result) != len(interesting):
        raise ValueError(str(list(result.keys())))

    return result
		||||
# More globals
# MSVC version deduced from the "MSC v." marker in sys.version; consulted by
# MSVCCompiler.initialize() below.
VERSION = get_build_version()
# MACROS = MacroExpander(VERSION)
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        super().__init__(verbose, dry_run, force)
        # MSVC version Python itself was built with (module-level VERSION).
        self.__version = VERSION
        self.__root = r"Software\Microsoft\VisualStudio"
        # self.__macros = MACROS
        # Search path for find_exe(); filled in by initialize().
        self.__paths = []
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.__arch = None  # deprecated name
        # Deferred setup: executables/paths are resolved lazily in
        # initialize(), not here.
        self.initialized = False

    def initialize(self, plat_name=None):  # noqa: C901
        """Locate the MSVC toolchain and set up compiler/linker options.

        Must be called exactly once before any compile/link operation;
        raises DistutilsPlatformError if a usable VC install is not found
        or *plat_name* is unsupported.
        """
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if self.__version < 8.0:
            raise DistutilsPlatformError(
                "VC %0.1f is not supported by this module" % self.__version
            )
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        ok_plats = 'win32', 'win-amd64'
        if plat_name not in ok_plats:
            raise DistutilsPlatformError(
                "--plat-name must be one of {}".format(ok_plats)
            )

        if (
            "DISTUTILS_USE_SDK" in os.environ
            and "MSSdk" in os.environ
            and self.find_exe("cl.exe")
        ):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
            # to cross compile, you use 'x86_amd64'.
            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (ie, it runs the x86 compiler directly)
            if plat_name == get_platform() or plat_name == 'win32':
                # native build or cross-compile to win32
                plat_spec = PLAT_TO_VCVARS[plat_name]
            else:
                # cross compile from win32 -> some 64bit
                plat_spec = (
                    PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name]
                )

            vc_env = query_vcvarsall(VERSION, plat_spec)

            self.__paths = vc_env['path'].split(os.pathsep)
            os.environ['lib'] = vc_env['lib']
            os.environ['include'] = vc_env['include']

            if len(self.__paths) == 0:
                # BUGFIX: this message used to be formatted with
                # self.__product, an attribute that is never assigned
                # anywhere in this class, so reaching this branch raised
                # AttributeError instead of the intended error.  Use the
                # known compiler version instead.
                raise DistutilsPlatformError(
                    "Python was built with Visual Studio version %s, "
                    "and extensions need to be built with the same "
                    "version of the compiler, but it isn't installed." % self.__version
                )

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")  # resource compiler
            self.mc = self.find_exe("mc.exe")  # message compiler
            # self.set_path_env_var('lib')
            # self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "x86":
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG']
            self.compile_options_debug = [
                '/nologo',
                '/Od',
                '/MDd',
                '/W3',
                '/Z7',
                '/D_DEBUG',
            ]
        else:
            # Win64: /GS- disables buffer security checks, matching the
            # options historically used for 64-bit builds.
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
            self.compile_options_debug = [
                '/nologo',
                '/Od',
                '/MDd',
                '/W3',
                '/GS-',
                '/Z7',
                '/D_DEBUG',
            ]

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
        """Map source file names to object file names under *output_dir*.

        Raises CompileError for extensions this compiler cannot handle.
        """
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base) :]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir, base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir, base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
        return obj_names

    def compile(  # noqa: C901
        self,
        sources,
        output_dir=None,
        macros=None,
        include_dirs=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        depends=None,
    ):
        """Compile *sources* to object files, returning the object list.

        .rc files go through the resource compiler, .mc files through the
        message compiler (then the resource compiler); everything else is
        fed to cl.exe.  Raises CompileError on any tool failure.
        """
        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs
        )
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object already up to date; _setup_compile omitted it.
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError(
                    "Don't know how to compile {} to {}".format(src, obj)
                )

            output_opt = "/Fo" + obj
            try:
                self.spawn(
                    [self.cc]
                    + compile_opts
                    + pp_opts
                    + [input_opt, output_opt]
                    + extra_postargs
                )
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Create a static library from *objects* with lib.exe.

        Raises LibError if the librarian fails; skips the step entirely
        when the output is already newer than all inputs.
        """
        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(  # noqa: C901
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link *objects* into an executable or shared library.

        Handles /EXPORT options, import-library placement and manifest
        embedding (mt.exe).  Raises LinkError on linker or mt.exe failure.
        """
        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn(
                "I don't know what to do with 'runtime_library_dirs': "
                + str(runtime_library_dirs)
            )

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                # Executables don't take /DLL; drop the first flag.
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in export_symbols or []:
                export_opts.append("/EXPORT:" + sym)

            ld_args = (
                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
            )

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            # NOTE: this deliberately overrides the build_temp parameter.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename)
                )
                implib_file = os.path.join(build_temp, self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

            # embed the manifest
            # XXX - this is somewhat fragile - if mt.exe fails, distutils
            # will still consider the DLL up-to-date, but it will not have a
            # manifest.  Maybe we should link to a temp file?  OTOH, that
            # implies a build environment error that shouldn't go undetected.
            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
            if mfinfo is not None:
                mffilename, mfid = mfinfo
                out_arg = '-outputresource:{};{}'.format(output_filename, mfid)
                try:
                    self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg])
                except DistutilsExecError as msg:
                    raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
        """Append the /MANIFESTFILE linker option, targeting *build_temp*."""
        # If we need a manifest at all, an embedded manifest is recommended.
        # See MSDN article titled
        # "How to: Embed a Manifest Inside a C/C++ Application"
        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
        # Ask the linker to generate the manifest in the temp dir, so
        # we can check it, and possibly embed it, later.
        temp_manifest = os.path.join(
            build_temp, os.path.basename(output_filename) + ".manifest"
        )
        ld_args.append('/MANIFESTFILE:' + temp_manifest)

    def manifest_get_embed_info(self, target_desc, ld_args):
        """Return (manifest_filename, resource_id) or None.

        None means no manifest should be embedded.  See
        http://bugs.python.org/issue7833 for why we want to avoid any
        manifest for extension modules if we can.
        """
        for arg in ld_args:
            if arg.startswith("/MANIFESTFILE:"):
                temp_manifest = arg.split(":", 1)[1]
                break
        else:
            # no /MANIFESTFILE so nothing to do.
            return None
        if target_desc == CCompiler.EXECUTABLE:
            # by default, executables always get the manifest with the
            # CRT referenced.
            mfid = 1
        else:
            # Extension modules try and avoid any manifest if possible.
            mfid = 2
            temp_manifest = self._remove_visual_c_ref(temp_manifest)
        if temp_manifest is None:
            return None
        return temp_manifest, mfid

    def _remove_visual_c_ref(self, manifest_file):
        """Strip Visual C runtime references from a manifest file.

        Returns the (rewritten) manifest filename, or None if after
        stripping no other assemblies are referenced (so no manifest
        should be embedded).  OSErrors are deliberately swallowed: a
        missing/unreadable manifest simply means "nothing to fix".
        """
        try:
            # Remove references to the Visual C runtime, so they will
            # fall through to the Visual C dependency of Python.exe.
            # This way, when installed for a restricted user (e.g.
            # runtimes are not in WinSxS folder, but in Python's own
            # folder), the runtimes do not need to be in every folder
            # with .pyd's.
            # Returns either the filename of the modified manifest or
            # None if no manifest should be embedded.
            manifest_f = open(manifest_file)
            try:
                manifest_buf = manifest_f.read()
            finally:
                manifest_f.close()
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=("|')Microsoft\."""
                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
                re.DOTALL,
            )
            manifest_buf = re.sub(pattern, "", manifest_buf)
            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
            manifest_buf = re.sub(pattern, "", manifest_buf)
            # Now see if any other assemblies are referenced - if not, we
            # don't want a manifest embedded.
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
                r""".*?(?:/>|</assemblyIdentity>)""",
                re.DOTALL,
            )
            if re.search(pattern, manifest_buf) is None:
                return None

            manifest_f = open(manifest_file, 'w')
            try:
                manifest_f.write(manifest_buf)
                return manifest_file
            finally:
                manifest_f.close()
        except OSError:
            pass

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the linker option naming *dir* as a library search path."""
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        """MSVC has no rpath equivalent; always raise."""
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC++"
        )

    def library_option(self, lib):
        """Return the linker argument for linking against *lib*."""
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=0):
        """Search *dirs* for *lib*; return the full path or None.

        With *debug* set, a '<lib>_d' debugging variant is preferred in
        each directory, falling back to the plain name.
        """
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        # Oops, didn't find it in *any* of 'dirs'.  (The original code put
        # this in a for/else clause; with no 'break' in the loop the else
        # always ran, so a plain return is equivalent and clearer.)
        return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe
@@ -0,0 +1,695 @@
 | 
			
		||||
"""distutils.msvccompiler
 | 
			
		||||
 | 
			
		||||
Contains MSVCCompiler, an implementation of the abstract CCompiler class
 | 
			
		||||
for the Microsoft Visual Studio.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# Written by Perry Stoll
 | 
			
		||||
# hacked by Robin Becker and Thomas Heller to do a better job of
 | 
			
		||||
#   finding DevStudio (through the registry)
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
import warnings
 | 
			
		||||
from .errors import (
 | 
			
		||||
    DistutilsExecError,
 | 
			
		||||
    DistutilsPlatformError,
 | 
			
		||||
    CompileError,
 | 
			
		||||
    LibError,
 | 
			
		||||
    LinkError,
 | 
			
		||||
)
 | 
			
		||||
from .ccompiler import CCompiler, gen_lib_options
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
# Figure out how to talk to the Windows registry.  Prefer the stdlib
# 'winreg' module; fall back to the pywin32 'win32api'/'win32con' pair.
# Either way the same four Reg* names and the HKEY constants are exposed
# so the rest of the module is source-compatible with both backends.
_can_read_reg = False
try:
    import winreg

    _can_read_reg = True
    hkey_mod = winreg

    RegOpenKeyEx = winreg.OpenKeyEx
    RegEnumKey = winreg.EnumKey
    RegEnumValue = winreg.EnumValue
    RegError = winreg.error

except ImportError:
    try:
        import win32api
        import win32con

        _can_read_reg = True
        hkey_mod = win32con

        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error
    except ImportError:
        # No registry access at all (e.g. non-Windows platform).
        # get_msvc_paths() checks _can_read_reg and degrades gracefully.
        log.info(
            "Warning: Can't read registry to find the "
            "necessary compiler setting\n"
            "Make sure that Python modules winreg, "
            "win32api or win32con are installed."
        )
        pass

if _can_read_reg:
    # Registry hives searched, in order, by read_keys()/read_values().
    HKEYS = (
        hkey_mod.HKEY_USERS,
        hkey_mod.HKEY_CURRENT_USER,
        hkey_mod.HKEY_LOCAL_MACHINE,
        hkey_mod.HKEY_CLASSES_ROOT,
    )


# Emitted once at import time: this whole module is legacy.
warnings.warn(
    "msvccompiler is deprecated and slated to be removed "
    "in the future. Please discontinue use or file an issue "
    "with pypa/distutils describing your use case.",
    DeprecationWarning,
)
 | 
			
		||||
def read_keys(base, key):
    """Return list of registry keys.

    Returns None when 'key' cannot be opened under hive 'base'.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    subkeys = []
    index = 0
    while True:
        try:
            subkeys.append(RegEnumKey(handle, index))
        except RegError:
            # Enumeration past the last subkey raises; we are done.
            return subkeys
        index += 1
 | 
			
		||||
def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.  Returns None when 'key'
    cannot be opened under hive 'base'.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while True:
        try:
            name, value, _type = RegEnumValue(handle, index)
        except RegError:
            # Ran past the last value; enumeration complete.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index += 1
 | 
			
		||||
def convert_mbcs(s):
    """Decode *s* with the Windows "mbcs" codec when it is byte-like.

    Objects without a ``decode`` method are returned unchanged, as is
    anything whose decoding raises a UnicodeError.
    """
    decoder = getattr(s, "decode", None)
    if decoder is None:
        return s
    try:
        return decoder("mbcs")
    except UnicodeError:
        return s
			
		||||
 | 
			
		||||
class MacroExpander:
    """Expand "$(Name)"-style macros using values read from the registry.

    Built for a specific Visual Studio 'version'; the macro table is
    populated once in __init__ via load_macros().
    """

    def __init__(self, version):
        # Maps "$(MacroName)" -> replacement string.
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        """Record macro 'macro' from value 'key' of registry path 'path'.

        Hives in HKEYS are searched in order; the first one that yields
        any values wins.  May raise KeyError when 'key' is absent from
        the values found (relied upon by load_macros below).
        """
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        """Populate the macro table for Visual Studio 'version'."""
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK-dir value name changed between VS 7.0 and later.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            # set_macro's d[key] lookup failed: the expected VS install
            # is not registered on this machine.
            raise DistutilsPlatformError(
                """Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
            )

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            # First subkey names the installed product; its "version"
            # value becomes $(FrameworkVersion).
            key = RegEnumKey(h, 0)
            d = read_values(base, r"{}\{}".format(p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return 's' with every known "$(Macro)" replaced by its value."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
 | 
			
		||||
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    Returns None when the embedded version cannot be interpreted.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No MSC tag at all: assume the oldest supported compiler.
        return 6
    digits, _rest = sys.version[pos + len(marker):].split(" ", 1)
    major = int(digits[:-2]) - 6
    if major >= 13:
        # v13 was skipped and should be v14
        major += 1
    minor = int(digits[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # else we don't know what version of the compiler this is
    return None
 | 
			
		||||
 | 
			
		||||
def get_build_architecture():
    """Return the processor architecture.

    Possible results are "Intel" or "AMD64".
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        # No architecture tag in sys.version: assume 32-bit Intel.
        return "Intel"
    end = sys.version.find(")", start)
    return sys.version[start + len(marker) : end]
 | 
			
		||||
 | 
			
		||||
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Normalizing first collapses variants such as /a and /a/ into one
    # entry; dict.fromkeys then drops later duplicates while preserving
    # insertion order.
    return list(dict.fromkeys(os.path.normpath(p) for p in paths))
 | 
			
		||||
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    # Windows libraries carry no "lib" prefix: both formats are just
    # "<name><extension>".
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        """Record the MSVC version/architecture used to build Python.

        Registry access and tool discovery are deferred to initialize(),
        which runs lazily the first time the compiler is used.
        """
        super().__init__(verbose, dry_run, force)
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        self.initialized = False

    def initialize(self):
        """Locate the MSVC tools and build the compile/link option lists.

        Honors a DISTUTILS_USE_SDK + MSSdk environment (trusting the SDK's
        own setup); otherwise tool paths come from the registry via
        get_msvc_paths().  Raises DistutilsPlatformError when the matching
        compiler cannot be found.
        """
        self.__paths = []
        if (
            "DISTUTILS_USE_SDK" in os.environ
            and "MSSdk" in os.environ
            and self.find_exe("cl.exe")
        ):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            self.__paths = self.get_msvc_paths("path")

            if len(self.__paths) == 0:
                raise DistutilsPlatformError(
                    "Python was built with %s, "
                    "and extensions need to be built with the same "
                    "version of the compiler, but it isn't installed." % self.__product
                )

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")  # resource compiler
            self.mc = self.find_exe("mc.exe")  # message compiler
            self.set_path_env_var('lib')
            self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "Intel":
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG']
            self.compile_options_debug = [
                '/nologo',
                '/Od',
                '/MDd',
                '/W3',
                '/GX',
                '/Z7',
                '/D_DEBUG',
            ]
        else:
            # Win64
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
            self.compile_options_debug = [
                '/nologo',
                '/Od',
                '/MDd',
                '/W3',
                '/GS-',
                '/Z7',
                '/D_DEBUG',
            ]

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
        else:
            self.ldflags_shared_debug = [
                '/DLL',
                '/nologo',
                '/INCREMENTAL:no',
                '/pdb:None',
                '/DEBUG',
            ]
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
        """Map source filenames to the object filenames they produce.

        .rc and .mc inputs map to .res files; everything else to .obj.
        Raises CompileError for an unrecognized extension.
        """
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base) :]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir, base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir, base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
        return obj_names

    def compile(  # noqa: C901
        self,
        sources,
        output_dir=None,
        macros=None,
        include_dirs=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        depends=None,
    ):
        """Compile 'sources'; return the list of object files produced.

        C/C++ sources go through cl.exe, .rc through rc.exe, and .mc
        through mc.exe then rc.exe.  Raises CompileError when a tool
        fails or a source type is not supported.
        """

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs
        )
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object not scheduled for rebuild by _setup_compile.
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError(
                    "Don't know how to compile {} to {}".format(src, obj)
                )

            output_opt = "/Fo" + obj
            try:
                self.spawn(
                    [self.cc]
                    + compile_opts
                    + pp_opts
                    + [input_opt, output_opt]
                    + extra_postargs
                )
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Bundle 'objects' into a static library with lib.exe.

        Skips the link when the output is already newer than every input.
        Raises LibError when lib.exe fails.
        """

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(  # noqa: C901
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link 'objects' into an executable or DLL with link.exe.

        Skips the link when the output is already newer than every input.
        Raises LinkError when link.exe fails.
        """

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn(
                "I don't know what to do with 'runtime_library_dirs': "
                + str(runtime_library_dirs)
            )

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            # Executables reuse the shared-library flag lists minus the
            # leading /DLL switch.
            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in export_symbols or []:
                export_opts.append("/EXPORT:" + sym)

            ld_args = (
                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
            )

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename)
                )
                implib_file = os.path.join(
                    os.path.dirname(objects[0]), self.library_filename(dll_name)
                )
                ld_args.append('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the linker switch adding 'dir' to the library search path."""
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        # MSVC has no rpath equivalent; callers must not request one.
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC++"
        )

    def library_option(self, lib):
        """Return the linker argument naming library 'lib'."""
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for library 'lib'; return its path or None."""
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe

    def get_msvc_paths(self, path, platform='x86'):
        """Get a list of devstudio directories (include, lib or path).

        Return a list of strings.  The list will be empty if unable to
        access the registry or appropriate registry keys not found.
        """
        if not _can_read_reg:
            return []

        path = path + " dirs"
        if self.__version >= 7:
            key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format(
                self.__root,
                self.__version,
            )
        else:
            key = (
                r"%s\6.0\Build System\Components\Platforms"
                r"\Win32 (%s)\Directories" % (self.__root, platform)
            )

        for base in HKEYS:
            d = read_values(base, key)
            if d:
                # VS 7+ stores the directories with embedded $(...) macros
                # that must be expanded before use.
                if self.__version >= 7:
                    return self.__macros.sub(d[path]).split(";")
                else:
                    return d[path].split(";")
        # MSVC 6 seems to create the registry entries we need only when
        # the GUI is run.
        if self.__version == 6:
            for base in HKEYS:
                if read_values(base, r"%s\6.0" % self.__root) is not None:
                    self.warn(
                        "It seems you have Visual Studio 6 installed, "
                        "but the expected registry settings are not present.\n"
                        "You must at least run the Visual Studio GUI once "
                        "so that these entries are created."
                    )
                    break
        return []

    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """

        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = ';'.join(p)
 | 
			
		||||
# When Python was built with MSVC 8.0 (VS 2005) or newer, replace this
# module's MSVCCompiler (and MacroExpander) with the msvc9compiler
# implementations at import time; the legacy class is kept reachable as
# OldMSVCCompiler.
if get_build_version() >= 8.0:
    log.debug("Importing new compiler from distutils.msvc9compiler")
    OldMSVCCompiler = MSVCCompiler
    from distutils.msvc9compiler import MSVCCompiler

    # get_build_architecture not really relevant now we support cross-compile
    from distutils.msvc9compiler import MacroExpander  # noqa: F811
			
		||||
@@ -0,0 +1,8 @@
 | 
			
		||||
def aix_platform(osname, version, release):
    """Return the platform tag for AIX.

    Prefer the stdlib's private ``_aix_support`` helper (Python 3.9+);
    on older interpreters fall back to a plain "osname-version.release"
    tag.
    """
    try:
        import _aix_support

        return _aix_support.aix_platform()
    except ImportError:
        pass
    return f"{osname}-{version}.{release}"
 | 
			
		||||
@@ -0,0 +1,22 @@
 | 
			
		||||
import sys
 | 
			
		||||
import platform
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def add_ext_suffix_39(vars):
    """
    Ensure vars contains 'EXT_SUFFIX'. pypa/distutils#130
    """
    import _imp

    # The first extension suffix is the canonical one for this build.
    suffix = _imp.extension_suffixes()[0]
    vars['EXT_SUFFIX'] = suffix
    # sysconfig sets SO to match EXT_SUFFIX, so maintain that expectation.
    # https://github.com/python/cpython/blob/785cc6770588de087d09e89a69110af2542be208/Lib/sysconfig.py#L671-L673
    vars['SO'] = suffix
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# EXT_SUFFIX is only missing from the config vars on Windows builds of
# Python < 3.10, so restrict the patch to that combination; everywhere
# else 'add_ext_suffix' is a no-op.
needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows'
add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None
 | 
			
		||||
@@ -0,0 +1,109 @@
 | 
			
		||||
"""distutils.spawn
 | 
			
		||||
 | 
			
		||||
Provides the 'spawn()' function, a front-end to various platform-
 | 
			
		||||
specific functions for launching another program in a sub-process.
 | 
			
		||||
Also provides the 'find_executable()' to search the path for a given
 | 
			
		||||
executable name.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
 | 
			
		||||
from .errors import DistutilsExecError
 | 
			
		||||
from .debug import DEBUG
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901
    """Run another program, specified as a command list 'cmd', in a new process.

    'cmd' is just the argument list for the new process, ie.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable.  If 'dry_run' is true,
    the command will not actually be run.

    Raise DistutilsExecError if running the program fails in any way; just
    return on success.
    """
    # Accept tuples too: we need a mutable list and predictable formatting
    # in the error messages below.
    cmd = list(cmd)

    log.info(subprocess.list2cmdline(cmd))
    if dry_run:
        return

    if search_path:
        resolved = find_executable(cmd[0])
        if resolved is not None:
            cmd[0] = resolved

    # Copy the environment so callers' os.environ is never mutated.
    if env is None:
        env = dict(os.environ)

    if sys.platform == 'darwin':
        from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver

        # Propagate the deployment target so child build tools agree with us.
        macosx_target_ver = get_macosx_target_ver()
        if macosx_target_ver:
            env[MACOSX_VERSION_VAR] = macosx_target_ver

    try:
        exitcode = subprocess.Popen(cmd, env=env).wait()
    except OSError as exc:
        if not DEBUG:
            # Shorten the message to just the program name unless debugging.
            cmd = cmd[0]
        raise DistutilsExecError(
            "command {!r} failed: {}".format(cmd, exc.args[-1])
        ) from exc

    if exitcode:
        if not DEBUG:
            cmd = cmd[0]
        raise DistutilsExecError(
            "command {!r} failed with exit code {}".format(cmd, exitcode)
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    A string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH'].  Returns the complete filename or None if not found.
    """
    _base, ext = os.path.splitext(executable)
    # Windows resolves programs by their .exe name.
    if sys.platform == 'win32' and ext != '.exe':
        executable += '.exe'

    # An existing (possibly relative) path wins outright.
    if os.path.isfile(executable):
        return executable

    if path is None:
        path = os.environ.get('PATH', None)
        # bpo-35755: only when PATH is entirely unset do we fall back to a
        # system default; an empty PATH means "search nowhere".
        if path is None:
            try:
                path = os.confstr("CS_PATH")
            except (AttributeError, ValueError):
                # os.confstr() or CS_PATH is not available
                path = os.defpath

    # PATH='' doesn't match, whereas PATH=':' looks in the current directory
    if not path:
        return None

    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
 | 
			
		||||
@@ -0,0 +1,552 @@
 | 
			
		||||
"""Provide access to Python's configuration information.  The specific
 | 
			
		||||
configuration variables available depend heavily on the platform and
 | 
			
		||||
configuration.  The values may be retrieved using
 | 
			
		||||
get_config_var(name), and the list of variables is available via
 | 
			
		||||
get_config_vars().keys().  Additional convenience functions are also
 | 
			
		||||
available.
 | 
			
		||||
 | 
			
		||||
Written by:   Fred L. Drake, Jr.
 | 
			
		||||
Email:        <fdrake@acm.org>
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
import sysconfig
 | 
			
		||||
import pathlib
 | 
			
		||||
 | 
			
		||||
from .errors import DistutilsPlatformError
 | 
			
		||||
from . import py39compat
 | 
			
		||||
from ._functools import pass_none
 | 
			
		||||
 | 
			
		||||
# True when running under the PyPy implementation.
IS_PYPY = '__pypy__' in sys.builtin_module_names

# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
# base_* prefixes point outside any active venv, at the underlying Python.
BASE_PREFIX = os.path.normpath(sys.base_prefix)
BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)

# Path to the base directory of the project. On Windows the binary may
# live in project/PCbuild/win32 or project/PCbuild/amd64.
# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
    project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
    if sys.executable:
        project_base = os.path.dirname(os.path.abspath(sys.executable))
    else:
        # sys.executable can be empty if argv[0] has been changed and Python is
        # unable to retrieve the real program name
        project_base = os.getcwd()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_python_source_dir(d):
 | 
			
		||||
    """
 | 
			
		||||
    Return True if the target directory appears to point to an
 | 
			
		||||
    un-installed Python.
 | 
			
		||||
    """
 | 
			
		||||
    modules = pathlib.Path(d).joinpath('Modules')
 | 
			
		||||
    return any(modules.joinpath(fn).is_file() for fn in ('Setup', 'Setup.local'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# sys._home is a private attribute the interpreter may set (e.g. for some
# in-tree builds); None when absent.
_sys_home = getattr(sys, '_home', None)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_parent(dir_a, dir_b):
    """
    Return True if 'dir_b' is a parent of 'dir_a', i.e. 'dir_a' lies
    inside 'dir_b'.

    NOTE(review): this is a naive, case-folded string-prefix test — it
    does not normalize separators, so '/foo/barbaz' also matches
    '/foo/bar'. Callers pass paths from the same source, which avoids
    that in practice.
    """
    return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if os.name == 'nt':

    @pass_none
    def _fix_pcbuild(d):
        # Map a path inside <prefix>/PCbuild back to the prefix itself;
        # 'pass_none' (from ._functools) presumably short-circuits a None
        # argument — confirm against that helper.
        # In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX.
        prefixes = PREFIX, BASE_PREFIX
        matched = (
            prefix
            for prefix in prefixes
            if _is_parent(d, os.path.join(prefix, "PCbuild"))
        )
        # Fall back to the original path when it is under no PCbuild dir.
        return next(matched, d)

    project_base = _fix_pcbuild(project_base)
    _sys_home = _fix_pcbuild(_sys_home)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _python_build():
    """Return True when this interpreter looks like an in-tree build."""
    # Prefer sys._home when the interpreter provides it.
    candidate = _sys_home or project_base
    return _is_python_source_dir(candidate)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Computed once at import time; True for an un-installed, in-tree Python.
python_build = _python_build()


# Calculate the build qualifier flags if they are defined.  Adding the flags
# to the include and lib directories only makes sense for an installation, not
# an in-source build.
build_flags = ''
try:
    if not python_build:
        build_flags = sys.abiflags
except AttributeError:
    # It's not a configure-based build, so the sys module doesn't have
    # this attribute, which is fine.
    pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_python_version():
    """Return a string containing the major and minor Python version,
    leaving off the patchlevel.  Sample return values could be '1.5'
    or '2.2'.
    """
    major, minor = sys.version_info[:2]
    return f'{major}.{minor}'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
    resolved_prefix = default_prefix if prefix is None else prefix
    # Dispatch on os.name ('posix', 'nt', ...) to a per-platform helper
    # defined in this module.
    try:
        getter = globals()[f'_get_python_inc_{os.name}']
    except KeyError:
        raise DistutilsPlatformError(
            "I don't know where Python installs its C header files "
            "on platform '%s'" % os.name
        )
    return getter(resolved_prefix, prefix, plat_specific)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_python_inc_posix(prefix, spec_prefix, plat_specific):
    """Resolve the posix include dir, trying the most specific source
    first; each helper is consulted only if the previous yielded nothing."""
    # Old PyPy (< 3.8) keeps headers directly under <prefix>/include.
    if IS_PYPY and sys.version_info < (3, 8):
        return os.path.join(prefix, 'include')
    result = _get_python_inc_posix_python(plat_specific)
    if not result:
        result = _get_python_inc_from_config(plat_specific, spec_prefix)
    if not result:
        result = _get_python_inc_posix_prefix(prefix)
    return result
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_python_inc_posix_python(plat_specific):
    """
    Assume the executable is in the build directory. The
    pyconfig.h file should be in the same directory. Since
    the build directory may not be the source directory,
    use "srcdir" from the makefile to find the "Include"
    directory.
    """
    # Only meaningful for an in-tree (source) build.
    if not python_build:
        return None
    # pyconfig.h lives next to the built binary.
    if plat_specific:
        return _sys_home or project_base
    return os.path.normpath(os.path.join(get_config_var('srcdir'), 'Include'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_python_inc_from_config(plat_specific, spec_prefix):
    """
    If no prefix was explicitly specified, provide the include
    directory from the config vars. Useful when
    cross-compiling, since the config vars may come from
    the host
    platform Python installation, while the current Python
    executable is from the build platform installation.

    >>> monkeypatch = getfixture('monkeypatch')
    >>> gpifc = _get_python_inc_from_config
    >>> monkeypatch.setitem(gpifc.__globals__, 'get_config_var', str.lower)
    >>> gpifc(False, '/usr/bin/')
    >>> gpifc(False, '')
    >>> gpifc(False, None)
    'includepy'
    >>> gpifc(True, None)
    'confincludepy'
    """
    # Note: an explicit empty-string prefix is treated as "specified"
    # and suppresses the config-var lookup (returns None implicitly).
    if spec_prefix is not None:
        return None
    key = 'CONFINCLUDEPY' if plat_specific else 'INCLUDEPY'
    return get_config_var(key)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_python_inc_posix_prefix(prefix):
    """Fallback: <prefix>/include/<impl><X.Y><abiflags>."""
    implementation = 'pypy' if IS_PYPY else 'python'
    subdir = implementation + get_python_version() + build_flags
    return os.path.join(prefix, "include", subdir)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_python_inc_nt(prefix, spec_prefix, plat_specific):
    """Resolve the Windows include location."""
    include = os.path.join(prefix, "include")
    if not python_build:
        return include
    # Include both the include and PC dir to ensure we can find pyconfig.h
    return include + os.path.pathsep + os.path.join(prefix, "PC")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# allow this behavior to be monkey-patched. Ref pypa/distutils#2.
 | 
			
		||||
def _posix_lib(standard_lib, libpython, early_prefix, prefix):
 | 
			
		||||
    if standard_lib:
 | 
			
		||||
        return libpython
 | 
			
		||||
    else:
 | 
			
		||||
        return os.path.join(libpython, "site-packages")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """

    if IS_PYPY and sys.version_info < (3, 8):
        # PyPy-specific schema
        if prefix is None:
            prefix = PREFIX
        if standard_lib:
            return os.path.join(prefix, "lib-python", sys.version[0])
        return os.path.join(prefix, 'site-packages')

    # Remember whether the caller supplied a prefix (may be None);
    # passed through to the monkey-patchable _posix_lib.
    early_prefix = prefix

    if prefix is None:
        # 'flag and A or B' intentionally falls back to B when A is
        # falsy (e.g. an empty prefix), unlike a plain conditional.
        if standard_lib:
            prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
        else:
            prefix = plat_specific and EXEC_PREFIX or PREFIX

    if os.name == "posix":
        if plat_specific or standard_lib:
            # Platform-specific modules (any module from a non-pure-Python
            # module distribution) or standard Python library modules.
            libdir = getattr(sys, "platlibdir", "lib")
        else:
            # Pure Python
            libdir = "lib"
        implementation = 'pypy' if IS_PYPY else 'python'
        libpython = os.path.join(prefix, libdir, implementation + get_python_version())
        return _posix_lib(standard_lib, libpython, early_prefix, prefix)
    elif os.name == "nt":
        if standard_lib:
            return os.path.join(prefix, "Lib")
        else:
            return os.path.join(prefix, "Lib", "site-packages")
    else:
        raise DistutilsPlatformError(
            "I don't know where Python installs its library "
            "on platform '%s'" % os.name
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def customize_compiler(compiler):  # noqa: C901
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.

    Environment variables (CC, CXX, LDSHARED, CPP, LDFLAGS, CFLAGS,
    CPPFLAGS, AR, ARFLAGS, RANLIB) override the Makefile values; the
    order of the overrides below matters and must be preserved.
    """
    if compiler.compiler_type == "unix":
        if sys.platform == "darwin":
            # Perform first-time customization of compiler-related
            # config vars on OS X now that we know we need a compiler.
            # This is primarily to support Pythons from binary
            # installers.  The kind and paths to build tools on
            # the user system may vary significantly from the system
            # that Python itself was built on.  Also the user OS
            # version and build tools may not support the same set
            # of CPU architectures for universal builds.
            global _config_vars
            # Use get_config_var() to ensure _config_vars is initialized.
            if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
                import _osx_support

                _osx_support.customize_compiler(_config_vars)
                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'

        # Baseline tool commands and flags from Python's own build config.
        (
            cc,
            cxx,
            cflags,
            ccshared,
            ldshared,
            shlib_suffix,
            ar,
            ar_flags,
        ) = get_config_vars(
            'CC',
            'CXX',
            'CFLAGS',
            'CCSHARED',
            'LDSHARED',
            'SHLIB_SUFFIX',
            'AR',
            'ARFLAGS',
        )

        if 'CC' in os.environ:
            newcc = os.environ['CC']
            if 'LDSHARED' not in os.environ and ldshared.startswith(cc):
                # If CC is overridden, use that as the default
                #       command for LDSHARED as well
                ldshared = newcc + ldshared[len(cc) :]
            cc = newcc
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"  # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            # CFLAGS is appended to both the compile and link commands.
            cflags = cflags + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
        if 'AR' in os.environ:
            ar = os.environ['AR']
        if 'ARFLAGS' in os.environ:
            archiver = ar + ' ' + os.environ['ARFLAGS']
        else:
            archiver = ar + ' ' + ar_flags

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc,
            archiver=archiver,
        )

        # Only honor RANLIB when this compiler actually uses a ranlib step.
        if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None):
            compiler.set_executables(ranlib=os.environ['RANLIB'])

        compiler.shared_lib_extension = shlib_suffix
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_config_h_filename():
    """Return full pathname of installed pyconfig.h file."""
    if not python_build:
        # Installed Python: defer to the stdlib sysconfig.
        return sysconfig.get_config_h_filename()
    base = _sys_home or project_base
    # In-tree Windows builds keep pyconfig.h under the PC directory.
    inc_dir = os.path.join(base, "PC") if os.name == "nt" else base
    return os.path.join(inc_dir, 'pyconfig.h')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build."""
    # Pure delegation to the stdlib sysconfig module.
    return sysconfig.get_makefile_filename()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_config_h(fp, g=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Pure delegation; only the second parameter's name differs from
    # the stdlib signature.
    return sysconfig.parse_config_h(fp, vars=g)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")  # NAME = value
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")  # $(NAME)
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")  # ${NAME}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_makefile(fn, g=None):  # noqa: C901
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.

    Two phases: first collect fully-resolved values in 'done' and
    values still containing $(VAR)/${VAR} references in 'notdone',
    then iteratively interpolate 'notdone' until it is empty.
    """
    from distutils.text_file import TextFile

    fp = TextFile(
        fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape"
    )

    if g is None:
        g = {}
    done = {}
    notdone = {}

    # Phase 1: read every NAME = value line.
    while True:
        line = fp.readline()
        if line is None:  # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # do variable interpolation here
    while notdone:
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if name.startswith('PY_') and name[3:] in renamed_variables:
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])
                else:
                    # Unknown variable: expands to empty, remembered in 'done'.
                    done[n] = item = ""
                if found:
                    after = value[m.end() :]
                    value = value[: m.start()] + item + after
                    if "$" in after:
                        # More references remain; revisit on a later round.
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]

                        # Mirror PY_CFLAGS & co. to their unprefixed names.
                        if name.startswith('PY_') and name[3:] in renamed_variables:

                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while True:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if not m:
            return s
        (beg, end) = m.span()
        # Default missing variables to '' as the docstring promises;
        # a bare vars.get(name) would return None and raise TypeError
        # on concatenation.  str() also copes with int values, which
        # parse_makefile() stores for purely numeric entries.
        s = s[0:beg] + str(vars.get(m.group(1), '')) + s[end:]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Cache for get_config_vars(); populated lazily on first call.
_config_vars = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_config_vars(*args):
    """Return configuration variables for the current platform.

    Called with no arguments, return the full dictionary of configuration
    variables -- on Unix, generally everything defined in Python's
    installed Makefile; on Windows, a much smaller set.  Called with one
    or more variable names, return a list of the corresponding values
    (None for names that are not defined).
    """
    global _config_vars
    if _config_vars is None:
        # First use: populate the module-level cache.  Copy so that later
        # tweaks don't mutate sysconfig's own dictionary.
        _config_vars = sysconfig.get_config_vars().copy()
        py39compat.add_ext_suffix(_config_vars)

    if not args:
        return _config_vars
    return [_config_vars.get(name) for name in args]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_config_var(name):
    """Return the value of the single configuration variable 'name'.

    Equivalent to ``get_config_vars().get(name)``.
    """
    if name == 'SO':
        # 'SO' was renamed to 'EXT_SUFFIX'; keep warning legacy callers.
        import warnings

        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
    return get_config_vars().get(name)
 | 
			
		||||
@@ -0,0 +1,287 @@
 | 
			
		||||
"""text_file
 | 
			
		||||
 | 
			
		||||
provides the TextFile class, which gives an interface to text files
 | 
			
		||||
that (optionally) takes care of stripping comments, ignoring blank
 | 
			
		||||
lines, and joining lines with backslashes."""
 | 
			
		||||
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TextFile:
    """Provides a file-like object that takes care of all the things you
    commonly want to do when processing a text file that has some
    line-by-line syntax: strip comments (as long as "#" is your
    comment character), skip blank lines, join adjacent lines by
    escaping the newline (ie. backslash at end of line), strip
    leading and/or trailing whitespace.  All of these are optional
    and independently controllable.

    Provides a 'warn()' method so you can generate warning messages that
    report physical line number, even if the logical line in question
    spans multiple physical lines.  Also provides 'unreadline()' for
    implementing line-at-a-time lookahead.

    Constructor is called as:

        TextFile (filename=None, file=None, **options)

    It bombs (RuntimeError) if both 'filename' and 'file' are None;
    'filename' should be a string, and 'file' a file object (or
    something that provides 'readline()' and 'close()' methods).  It is
    recommended that you supply at least 'filename', so that TextFile
    can include it in warning messages.  If 'file' is not supplied,
    TextFile creates its own using 'io.open()'.

    The options are all boolean, and affect the value returned by
    'readline()':
      strip_comments [default: true]
        strip from "#" to end-of-line, as well as any whitespace
        leading up to the "#" -- unless it is escaped by a backslash
      lstrip_ws [default: false]
        strip leading whitespace from each line before returning it
      rstrip_ws [default: true]
        strip trailing whitespace (including line terminator!) from
        each line before returning it
      skip_blanks [default: true]
        skip lines that are empty *after* stripping comments and
        whitespace.  (If both lstrip_ws and rstrip_ws are false,
        then some lines may consist of solely whitespace: these will
        *not* be skipped, even if 'skip_blanks' is true.)
      join_lines [default: false]
        if a backslash is the last non-newline character on a line
        after stripping comments and whitespace, join the following line
        to it to form one "logical line"; if N consecutive lines end
        with a backslash, then N+1 physical lines will be joined to
        form one logical line.
      collapse_join [default: false]
        strip leading whitespace from lines that are joined to their
        predecessor; only matters if (join_lines and not lstrip_ws)
      errors [default: 'strict']
        error handler used to decode the file content

    Note that since 'rstrip_ws' can strip the trailing newline, the
    semantics of 'readline()' must differ from those of the builtin file
    object's 'readline()' method!  In particular, 'readline()' returns
    None for end-of-file: an empty string might just be a blank line (or
    an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
    not."""

    # Fallback values for every recognized option; also serves as the
    # definitive list of valid option names (checked in __init__).
    default_options = {
        'strip_comments': 1,
        'skip_blanks': 1,
        'lstrip_ws': 0,
        'rstrip_ws': 1,
        'join_lines': 0,
        'collapse_join': 0,
        'errors': 'strict',
    }

    def __init__(self, filename=None, file=None, **options):
        """Construct a new TextFile object.  At least one of 'filename'
        (a string) and 'file' (a file-like object) must be supplied.
        They keyword argument options are described above and affect
        the values returned by 'readline()'."""
        if filename is None and file is None:
            raise RuntimeError(
                "you must supply either or both of 'filename' and 'file'"
            )

        # set values for all options -- either from client option hash
        # or fallback to default_options
        for opt in self.default_options.keys():
            if opt in options:
                setattr(self, opt, options[opt])
            else:
                setattr(self, opt, self.default_options[opt])

        # sanity check client option hash
        for opt in options.keys():
            if opt not in self.default_options:
                raise KeyError("invalid TextFile option '%s'" % opt)

        if file is None:
            self.open(filename)
        else:
            self.filename = filename
            self.file = file
            self.current_line = 0  # assuming that file is at BOF!

        # 'linebuf' is a stack of lines that will be emptied before we
        # actually read from the file; it's only populated by an
        # 'unreadline()' operation
        self.linebuf = []

    def open(self, filename):
        """Open a new file named 'filename'.  This overrides both the
        'filename' and 'file' arguments to the constructor."""
        self.filename = filename
        # 'errors' controls decoding of the file content (see class docs).
        self.file = open(self.filename, errors=self.errors)
        self.current_line = 0

    def close(self):
        """Close the current file and forget everything we know about it
        (filename, current line number)."""
        # Clear our references first so state is consistent even if the
        # underlying close() raises.
        file = self.file
        self.file = None
        self.filename = None
        self.current_line = None
        file.close()

    def gen_error(self, msg, line=None):
        # Build a "filename, line N: msg" string; 'line' may be an int or a
        # (start, end) pair for a logical line spanning physical lines.
        outmsg = []
        if line is None:
            line = self.current_line
        outmsg.append(self.filename + ", ")
        if isinstance(line, (list, tuple)):
            outmsg.append("lines %d-%d: " % tuple(line))
        else:
            outmsg.append("line %d: " % line)
        outmsg.append(str(msg))
        return "".join(outmsg)

    def error(self, msg, line=None):
        # Raise with a location-annotated message (same format as warn()).
        raise ValueError("error: " + self.gen_error(msg, line))

    def warn(self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
        line in the current file.  If the current logical line in the
        file spans multiple physical lines, the warning refers to the
        whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
        the current line number; it may be a list or tuple to indicate a
        range of physical lines, or an integer for a single physical
        line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")

    def readline(self):  # noqa: C901
        """Read and return a single logical line from the current file (or
        from an internal buffer if lines have previously been "unread"
        with 'unreadline()').  If the 'join_lines' option is true, this
        may involve reading multiple physical lines concatenated into a
        single string.  Updates the current line number, so calling
        'warn()' after 'readline()' emits a warning about the physical
        line(s) just read.  Returns None on end-of-file, since the empty
        string can occur if 'rstrip_ws' is true but 'skip_blanks' is
        not."""
        # If any "unread" lines waiting in 'linebuf', return the top
        # one.  (We don't actually buffer read-ahead data -- lines only
        # get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.
        if self.linebuf:
            line = self.linebuf[-1]
            del self.linebuf[-1]
            return line

        # Accumulates backslash-continued physical lines when 'join_lines'.
        buildup_line = ''

        while True:
            # read the line, make it None if EOF
            line = self.file.readline()
            if line == '':
                line = None

            if self.strip_comments and line:

                # Look for the first "#" in the line.  If none, never
                # mind.  If we find one and it's the first character, or
                # is not preceded by "\", then it starts a comment --
                # strip the comment, strip whitespace before it, and
                # carry on.  Otherwise, it's just an escaped "#", so
                # unescape it (and any other escaped "#"'s that might be
                # lurking in there) and otherwise leave the line alone.

                pos = line.find("#")
                if pos == -1:  # no "#" -- no comments
                    pass

                # It's definitely a comment -- either "#" is the first
                # character, or it's elsewhere and unescaped.
                elif pos == 0 or line[pos - 1] != "\\":
                    # Have to preserve the trailing newline, because it's
                    # the job of a later step (rstrip_ws) to remove it --
                    # and if rstrip_ws is false, we'd better preserve it!
                    # (NB. this means that if the final line is all comment
                    # and has no trailing newline, we will think that it's
                    # EOF; I think that's OK.)
                    eol = (line[-1] == '\n') and '\n' or ''
                    line = line[0:pos] + eol

                    # If all that's left is whitespace, then skip line
                    # *now*, before we try to join it to 'buildup_line' --
                    # that way constructs like
                    #   hello \\
                    #   # comment that should be ignored
                    #   there
                    # result in "hello there".
                    if line.strip() == "":
                        continue
                else:  # it's an escaped "#"
                    line = line.replace("\\#", "#")

            # did previous line end with a backslash? then accumulate
            if self.join_lines and buildup_line:
                # oops: end of file
                if line is None:
                    self.warn("continuation line immediately precedes " "end-of-file")
                    return buildup_line

                if self.collapse_join:
                    line = line.lstrip()
                line = buildup_line + line

                # careful: pay attention to line number when incrementing it
                if isinstance(self.current_line, list):
                    self.current_line[1] = self.current_line[1] + 1
                else:
                    self.current_line = [self.current_line, self.current_line + 1]
            # just an ordinary line, read it as usual
            else:
                if line is None:  # eof
                    return None

                # still have to be careful about incrementing the line number!
                if isinstance(self.current_line, list):
                    self.current_line = self.current_line[1] + 1
                else:
                    self.current_line = self.current_line + 1

            # strip whitespace however the client wants (leading and
            # trailing, or one or the other, or neither)
            if self.lstrip_ws and self.rstrip_ws:
                line = line.strip()
            elif self.lstrip_ws:
                line = line.lstrip()
            elif self.rstrip_ws:
                line = line.rstrip()

            # blank line (whether we rstrip'ed or not)? skip to next line
            # if appropriate
            if (line == '' or line == '\n') and self.skip_blanks:
                continue

            if self.join_lines:
                if line[-1] == '\\':
                    buildup_line = line[:-1]
                    continue

                if line[-2:] == '\\\n':
                    buildup_line = line[0:-2] + '\n'
                    continue

            # well, I guess there's some actual content there: return it
            return line

    def readlines(self):
        """Read and return the list of all logical lines remaining in the
        current file."""
        lines = []
        while True:
            line = self.readline()
            if line is None:
                return lines
            lines.append(line)

    def unreadline(self, line):
        """Push 'line' (a string) onto an internal buffer that will be
        checked by future 'readline()' calls.  Handy for implementing
        a parser with line-at-a-time lookahead."""
        self.linebuf.append(line)
 | 
			
		||||
@@ -0,0 +1,401 @@
 | 
			
		||||
"""distutils.unixccompiler
 | 
			
		||||
 | 
			
		||||
Contains the UnixCCompiler class, a subclass of CCompiler that handles
 | 
			
		||||
the "typical" Unix-style command-line C compiler:
 | 
			
		||||
  * macros defined with -Dname[=value]
 | 
			
		||||
  * macros undefined with -Uname
 | 
			
		||||
  * include search directories specified with -Idir
 | 
			
		||||
  * libraries specified with -lllib
 | 
			
		||||
  * library search directories specified with -Ldir
 | 
			
		||||
  * compile handled by 'cc' (or similar) executable with -c option:
 | 
			
		||||
    compiles .c to .o
 | 
			
		||||
  * link static library handled by 'ar' command (possibly with 'ranlib')
 | 
			
		||||
  * link shared library handled by 'cc -shared'
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import re
 | 
			
		||||
import shlex
 | 
			
		||||
import itertools
 | 
			
		||||
 | 
			
		||||
from . import sysconfig
 | 
			
		||||
from .dep_util import newer
 | 
			
		||||
from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options
 | 
			
		||||
from .errors import DistutilsExecError, CompileError, LibError, LinkError
 | 
			
		||||
from ._log import log
 | 
			
		||||
from ._macos_compat import compiler_fixup
 | 
			
		||||
 | 
			
		||||
# XXX Things not currently handled:
 | 
			
		||||
#   * optimization/debug/warning flags; we just use whatever's in Python's
 | 
			
		||||
#     Makefile and live with it.  Is this adequate?  If not, we might
 | 
			
		||||
#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
 | 
			
		||||
#     SunCCompiler, and I suspect down that road lies madness.
 | 
			
		||||
#   * even if we don't know a warning flag from an optimization flag,
 | 
			
		||||
#     we need some way for outsiders to feed preprocessor/compiler/linker
 | 
			
		||||
#     flags in to us -- eg. a sysadmin might want to mandate certain flags
 | 
			
		||||
#     via a site config file, or a user might want to set something for
 | 
			
		||||
#     compiling this module distribution only via the setup.py command
 | 
			
		||||
#     line, whatever.  As long as these options come from something on the
 | 
			
		||||
#     current system, they can be as system-dependent as they like, and we
 | 
			
		||||
#     should just happily stuff them into the preprocessor/compiler/linker
 | 
			
		||||
#     options and carry on.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _split_env(cmd):
 | 
			
		||||
    """
 | 
			
		||||
    For macOS, split command into 'env' portion (if any)
 | 
			
		||||
    and the rest of the linker command.
 | 
			
		||||
 | 
			
		||||
    >>> _split_env(['a', 'b', 'c'])
 | 
			
		||||
    ([], ['a', 'b', 'c'])
 | 
			
		||||
    >>> _split_env(['/usr/bin/env', 'A=3', 'gcc'])
 | 
			
		||||
    (['/usr/bin/env', 'A=3'], ['gcc'])
 | 
			
		||||
    """
 | 
			
		||||
    pivot = 0
 | 
			
		||||
    if os.path.basename(cmd[0]) == "env":
 | 
			
		||||
        pivot = 1
 | 
			
		||||
        while '=' in cmd[pivot]:
 | 
			
		||||
            pivot += 1
 | 
			
		||||
    return cmd[:pivot], cmd[pivot:]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _split_aix(cmd):
 | 
			
		||||
    """
 | 
			
		||||
    AIX platforms prefix the compiler with the ld_so_aix
 | 
			
		||||
    script, so split that from the linker command.
 | 
			
		||||
 | 
			
		||||
    >>> _split_aix(['a', 'b', 'c'])
 | 
			
		||||
    ([], ['a', 'b', 'c'])
 | 
			
		||||
    >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc'])
 | 
			
		||||
    (['/bin/foo/ld_so_aix'], ['gcc'])
 | 
			
		||||
    """
 | 
			
		||||
    pivot = os.path.basename(cmd[0]) == 'ld_so_aix'
 | 
			
		||||
    return cmd[:pivot], cmd[pivot:]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _linker_params(linker_cmd, compiler_cmd):
 | 
			
		||||
    """
 | 
			
		||||
    The linker command usually begins with the compiler
 | 
			
		||||
    command (possibly multiple elements), followed by zero or more
 | 
			
		||||
    params for shared library building.
 | 
			
		||||
 | 
			
		||||
    If the LDSHARED env variable overrides the linker command,
 | 
			
		||||
    however, the commands may not match.
 | 
			
		||||
 | 
			
		||||
    Return the best guess of the linker parameters by stripping
 | 
			
		||||
    the linker command. If the compiler command does not
 | 
			
		||||
    match the linker command, assume the linker command is
 | 
			
		||||
    just the first element.
 | 
			
		||||
 | 
			
		||||
    >>> _linker_params('gcc foo bar'.split(), ['gcc'])
 | 
			
		||||
    ['foo', 'bar']
 | 
			
		||||
    >>> _linker_params('gcc foo bar'.split(), ['other'])
 | 
			
		||||
    ['foo', 'bar']
 | 
			
		||||
    >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split())
 | 
			
		||||
    ['foo', 'bar']
 | 
			
		||||
    >>> _linker_params(['gcc'], ['gcc'])
 | 
			
		||||
    []
 | 
			
		||||
    """
 | 
			
		||||
    c_len = len(compiler_cmd)
 | 
			
		||||
    pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1
 | 
			
		||||
    return linker_cmd[pivot:]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class UnixCCompiler(CCompiler):
    """CCompiler subclass driving the "typical" Unix-style command-line
    C compiler toolchain (cc driver, 'ar' archiver, optional ranlib)."""

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {
        'preprocessor': None,
        'compiler': ["cc"],
        'compiler_so': ["cc"],
        'compiler_cxx': ["cc"],
        'linker_so': ["cc", "-shared"],
        'linker_exe': ["cc"],
        'archiver': ["ar", "-cr"],
        'ranlib': None,
    }

    # macOS gets an explicit ranlib step after archiving.
    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    xcode_stub_lib_extension = ".tbd"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    xcode_stub_lib_format = dylib_lib_format
    # Cygwin produces Windows-style executables despite the Unix toolchain.
    if sys.platform == "cygwin":
        exe_extension = ".exe"
 | 
			
		||||
 | 
			
		||||
    def preprocess(
        self,
        source,
        output_file=None,
        macros=None,
        include_dirs=None,
        extra_preargs=None,
        extra_postargs=None,
    ):
        """Preprocess 'source' with the configured preprocessor command.

        Runs only when forced, when output goes to stdout (no
        'output_file'), or when 'source' is newer than 'output_file'.
        Raises CompileError if the preprocessor invocation fails.
        """
        fixed_args = self._fix_compile_args(None, macros, include_dirs)
        ignore, macros, include_dirs = fixed_args
        pp_opts = gen_preprocess_options(macros, include_dirs)
        # Assemble the command: preprocessor + macro/include opts, then
        # output redirection, pre/post extras, and finally the source file.
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # reasons to preprocess:
        # - force is indicated
        # - output is directed to stdout
        # - source file is newer than the target
        preprocess = self.force or output_file is None or newer(source, output_file)
        if not preprocess:
            return

        if output_file:
            self.mkpath(os.path.dirname(output_file))

        try:
            self.spawn(pp_args)
        except DistutilsExecError as msg:
            raise CompileError(msg)
 | 
			
		||||
 | 
			
		||||
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file 'src' into object file 'obj'.

        Invoked by the base class per source file; raises CompileError
        when the compiler exits with an error.
        """
        # Let the macOS compatibility shim adjust the compiler command
        # for the flags we are about to pass.
        fixed_compiler = compiler_fixup(self.compiler_so, cc_args + extra_postargs)
        command = fixed_compiler + cc_args + [src, '-o', obj] + extra_postargs
        try:
            self.spawn(command)
        except DistutilsExecError as msg:
            raise CompileError(msg)
 | 
			
		||||
 | 
			
		||||
    def create_static_lib(
        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
    ):
        """Archive 'objects' into a static library using self.archiver,
        then run ranlib on the result when one is configured.  Skips the
        work when the output is already up to date.

        NOTE(review): 'debug' and 'target_lang' are accepted for interface
        compatibility but are not referenced in this body.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver + [output_filename] + objects + self.objects)

            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does.  Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError as msg:
                    raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
 | 
			
		||||
 | 
			
		||||
    def link(
        self,
        target_desc,
        objects,
        output_filename,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug=0,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ):
        """Link 'objects' into 'output_filename' -- an executable or a
        shared object, depending on 'target_desc'.  Skips the link when
        the output is up to date; raises LinkError on failure.

        NOTE(review): 'export_symbols' and 'build_temp' are accepted for
        interface compatibility; they are not referenced in this body.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
        if not isinstance(output_dir, (str, type(None))):
            raise TypeError("'output_dir' must be a string or None")
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = objects + self.objects + lib_opts + ['-o', output_filename]
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                # Select a linker based on context: linker_exe when
                # building an executable or linker_so (with shared options)
                # when building a shared library.
                building_exe = target_desc == CCompiler.EXECUTABLE
                linker = (self.linker_exe if building_exe else self.linker_so)[:]

                if target_lang == "c++" and self.compiler_cxx:
                    # Link C++ with the C++ driver, preserving any 'env'
                    # prefix, AIX wrapper script, and the shared-library
                    # params from the originally selected linker.
                    env, linker_ne = _split_env(linker)
                    aix, linker_na = _split_aix(linker_ne)
                    _, compiler_cxx_ne = _split_env(self.compiler_cxx)
                    _, linker_exe_ne = _split_env(self.linker_exe)

                    params = _linker_params(linker_na, linker_exe_ne)
                    linker = env + aix + compiler_cxx_ne + params

                linker = compiler_fixup(linker, ld_args)

                self.spawn(linker + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
 | 
			
		||||
 | 
			
		||||
    # -- Miscellaneous methods -----------------------------------------
 | 
			
		||||
    # These are all used by the 'gen_lib_options()' function, in
    # ccompiler.py.
 | 
			
		||||
 | 
			
		||||
    def library_dir_option(self, dir):
        """Return the compiler flag that adds *dir* to the library search path."""
        return f"-L{dir}"
 | 
			
		||||
 | 
			
		||||
    def _is_gcc(self):
        """Best-effort check whether the configured C compiler is GCC/G++.

        Looks at the basename of the first token of the ``CC`` sysconfig
        variable; matches names containing ``gcc`` or ``g++`` (e.g.
        ``x86_64-linux-gnu-gcc``).
        """
        cc_cmd = sysconfig.get_config_var("CC")
        cc_exe = os.path.basename(shlex.split(cc_cmd)[0])
        return any(name in cc_exe for name in ("gcc", "g++"))
 | 
			
		||||
 | 
			
		||||
    def runtime_library_dir_option(self, dir):
        """Return the linker option(s) that add *dir* to the runtime library
        search path (RPATH/RUNPATH) for the current platform.

        Returns a single string on most platforms, but a list of two flags
        on HP-UX.
        """
        # XXX Hackish, at the very least.  See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        #   ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to
        # be told to pass the -R option through to the linker, whereas
        # other compilers and gcc on other systems just know this.
        # Other compilers may need something slightly different.  At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        if sys.platform[:6] == "darwin":
            from distutils.util import get_macosx_target_ver, split_version

            macosx_target_ver = get_macosx_target_ver()
            if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]:
                return "-Wl,-rpath," + dir
            else:  # no support for -rpath on earlier macOS versions
                return "-L" + dir
        elif sys.platform[:7] == "freebsd":
            return "-Wl,-rpath=" + dir
        elif sys.platform[:5] == "hp-ux":
            # HP-UX needs two separate arguments; gcc wraps '+s' for the
            # linker with -Wl, other compilers take it directly.
            return [
                "-Wl,+s" if self._is_gcc() else "+s",
                "-L" + dir,
            ]

        # For all compilers, `-Wl` is the presumed way to
        # pass a compiler option to the linker and `-R` is
        # the way to pass an RPATH.
        if sysconfig.get_config_var("GNULD") == "yes":
            # GNU ld needs an extra option to get a RUNPATH
            # instead of just an RPATH.
            return "-Wl,--enable-new-dtags,-R" + dir
        else:
            return "-Wl,-R" + dir
 | 
			
		||||
 | 
			
		||||
    def library_option(self, lib):
        """Return the linker flag that links against library *lib*."""
        return f"-l{lib}"
 | 
			
		||||
 | 
			
		||||
    @staticmethod
    def _library_root(dir):
        """Map *dir* into an alternate macOS SDK root when one is in use.

        macOS users can specify an alternate SDK using '-isysroot' in the
        configured CFLAGS; when present (and on darwin), system library
        directories (/System/..., and /usr/... except /usr/local/) are
        resolved inside that SDK root instead of '/'.

        Note that, as of Xcode 7, Apple SDKs may contain textual stub
        libraries (.tbd) rather than the .dylib shared libraries installed
        in /, so the base filename returned by find_library_file may have a
        different extension from the library on the running system, e.g.
        .../MacOSX10.11.sdk/usr/lib/libedit.tbd vs /usr/lib/libedit.dylib.
        """
        match = re.search(r'-isysroot\s*(\S+)', sysconfig.get_config_var('CFLAGS'))
        if sys.platform != 'darwin' or not match:
            return dir
        is_system_dir = dir.startswith('/System/') or (
            dir.startswith('/usr/') and not dir.startswith('/usr/local/')
        )
        if not is_system_dir:
            return dir
        return os.path.join(match.group(1), dir[1:])
 | 
			
		||||
 | 
			
		||||
    def find_library_file(self, dirs, lib, debug=0):
        """Return the first existing library file for *lib* in *dirs*, or None.

        Second-guess the linker with not much hard data to go on: GCC seems
        to prefer the shared library, so assume that *all* Unix C compilers
        do, ignoring even GCC's "-static" option.  Candidate filenames are
        tried in dylib/xcode_stub/shared/static order across every directory
        (each mapped through _library_root for SDK-aware lookup).
        """
        variants = [
            self.library_filename(lib, lib_type=kind)
            for kind in 'dylib xcode_stub shared static'.split()
        ]
        for root in map(self._library_root, dirs):
            for name in variants:
                candidate = os.path.join(root, name)
                if os.path.exists(candidate):
                    return candidate
        # Not found in any directory.
        return None
 | 
			
		||||
@@ -0,0 +1,513 @@
 | 
			
		||||
"""distutils.util
 | 
			
		||||
 | 
			
		||||
Miscellaneous utility functions -- anything that doesn't fit into
 | 
			
		||||
one of the other *util.py modules.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import importlib.util
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import string
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
import sysconfig
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
from .errors import DistutilsPlatformError, DistutilsByteCompileError
 | 
			
		||||
from .dep_util import newer
 | 
			
		||||
from .spawn import spawn
 | 
			
		||||
from ._log import log
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_host_platform():
    """Return a string that identifies the current platform.

    Use this to distinguish platform-specific build directories and
    platform-specific built distributions.  Mostly delegates to the stdlib
    ``sysconfig.get_platform()``; the version-guarded branches preserve the
    Python 3.9 platform names when running on older interpreters (this
    function historically exposed 3.9 semantics when distutils was split
    out).
    """
    py_version = sys.version_info

    # Pre-3.8 Windows sysconfig could not report ARM targets.
    if py_version < (3, 8) and os.name == 'nt':
        build_str = sys.version.lower()
        if '(arm)' in build_str:
            return 'win-arm32'
        if '(arm64)' in build_str:
            return 'win-arm64'

    # Pre-3.9 sysconfig lacked the modern AIX platform tag.
    if py_version < (3, 9) and os.name == "posix" and hasattr(os, 'uname'):
        osname, _, release, osversion, _ = os.uname()
        if osname.startswith("aix"):
            from .py38compat import aix_platform

            return aix_platform(osname, osversion, release)

    return sysconfig.get_platform()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_platform():
    """Return the platform we are building *for* (cross-compile aware).

    On Windows, honor the VSCMD_ARG_TGT_ARCH environment variable set by
    the Visual Studio developer shell, so cross builds report the target
    architecture; everywhere else (or when unset/unknown) fall back to the
    host platform.
    """
    if os.name != 'nt':
        return get_host_platform()
    arch_to_plat = {
        'x86': 'win32',
        'x64': 'win-amd64',
        'arm': 'win-arm32',
        'arm64': 'win-arm64',
    }
    requested = arch_to_plat.get(os.environ.get('VSCMD_ARG_TGT_ARCH'))
    return requested or get_host_platform()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# macOS deployment-target handling.  The cache below is only created on
# darwin; MACOSX_VERSION_VAR names the environment/sysconfig variable
# holding the deployment target (e.g. "10.15").
if sys.platform == 'darwin':
    _syscfg_macosx_ver = None  # cache the version pulled from sysconfig
MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _clear_cached_macosx_ver():
    """Reset the cached sysconfig macOS version.

    For testing only. Do not call.
    """
    global _syscfg_macosx_ver
    _syscfg_macosx_ver = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_macosx_target_ver_from_syscfg():
    """Return the macOS deployment target latched in the interpreter config.

    Reads MACOSX_DEPLOYMENT_TARGET from distutils.sysconfig once and caches
    the result in the module-level ``_syscfg_macosx_ver``.  Returns the
    version as a string, or None if one can't be obtained.
    """
    global _syscfg_macosx_ver
    if _syscfg_macosx_ver is not None:
        return _syscfg_macosx_ver
    from distutils import sysconfig

    configured = sysconfig.get_config_var(MACOSX_VERSION_VAR) or ''
    if configured:
        _syscfg_macosx_ver = configured
    return _syscfg_macosx_ver
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_macosx_target_ver():
    """Return the version of macOS for which we are building.

    The target version defaults to the version in sysconfig latched at time
    the Python interpreter was built, unless overridden by the
    MACOSX_DEPLOYMENT_TARGET environment variable. If neither source has a
    value, then None is returned.

    Raises DistutilsPlatformError if the environment override is below 10.3
    while the interpreter was configured for 10.3 or later.
    """

    syscfg_ver = get_macosx_target_ver_from_syscfg()
    env_ver = os.environ.get(MACOSX_VERSION_VAR)

    if env_ver:
        # Validate overridden version against sysconfig version, if have both.
        # Ensure that the deployment target of the build process is not less
        # than 10.3 if the interpreter was built for 10.3 or later.  This
        # ensures extension modules are built with correct compatibility
        # values, specifically LDSHARED which can use
        # '-undefined dynamic_lookup' which only works on >= 10.3.
        if (
            syscfg_ver
            and split_version(syscfg_ver) >= [10, 3]
            and split_version(env_ver) < [10, 3]
        ):
            # NOTE: the adjacent string literals concatenate before the '%'
            # applies, so the format covers both "%s" placeholders.
            my_msg = (
                '$' + MACOSX_VERSION_VAR + ' mismatch: '
                'now "%s" but "%s" during configure; '
                'must use 10.3 or later' % (env_ver, syscfg_ver)
            )
            raise DistutilsPlatformError(my_msg)
        return env_ver
    return syscfg_ver
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def split_version(s):
    """Convert a dot-separated version string into a list of ints.

    E.g. ``split_version('10.3')`` returns ``[10, 3]``, suitable for list
    comparison against other split versions.
    """
    return list(map(int, s.split('.')))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    Filenames in setup scripts are always supplied in Unix style; split on
    '/' and rejoin with the local directory separator.  This is a no-op
    where os.sep is already '/'.  Raises ValueError on non-Unix-ish systems
    if 'pathname' either starts or ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # Drop '.' components; a path that was nothing but dots collapses
    # to the current directory.
    components = [part for part in pathname.split('/') if part != '.']
    if not components:
        return os.curdir
    return os.path.join(*components)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# convert_path ()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def change_root(new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to
    os.path.join(new_root, pathname); otherwise 'pathname' is first made
    relative, which is tricky on DOS/Windows.  Raises
    DistutilsPlatformError for platforms other than posix/nt.
    """
    if os.name == 'posix':
        relative = pathname if not os.path.isabs(pathname) else pathname[1:]
        return os.path.join(new_root, relative)

    if os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    raise DistutilsPlatformError(f"nothing known about platform '{os.name}'")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def check_environ():
    """Ensure 'os.environ' has the variables we guarantee users can
    reference in config files, command-line options, etc.

    Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')

    The lru_cache makes this run at most once per process.
    """
    if os.name == 'posix' and 'HOME' not in os.environ:
        try:
            import pwd

            os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
        except (ImportError, KeyError):
            # bpo-10496: the current user identifier may not exist in the
            # password database; leave HOME unset in that case.
            pass

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = get_platform()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def subst_vars(s, local_vars):
    """
    Substitute format-style "{var}" variables in *s*.

    Each variable is replaced by the value found in the 'local_vars'
    dictionary, falling back to 'os.environ' (which is first
    checked/augmented via 'check_environ()').  Raises ValueError for any
    variable found in neither mapping.
    """
    check_environ()
    mapping = dict(os.environ)
    mapping.update({name: str(value) for name, value in local_vars.items()})
    try:
        return _subst_compat(s).format_map(mapping)
    except KeyError as var:
        raise ValueError(f"invalid variable {var}")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _subst_compat(s):
    """
    Replace shell/Perl-style variable substitution ("$var") with
    format-style ("{var}").  Kept for compatibility with legacy inputs;
    emits a DeprecationWarning whenever a substitution was actually made.
    """

    def _subst(match):
        return f'{{{match.group(1)}}}'

    repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    if repl != s:
        import warnings

        warnings.warn(
            # Fix: user-visible message had a typo ("substitions").
            "shell/Perl-style substitutions are deprecated",
            DeprecationWarning,
        )
    return repl
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def grok_environment_error(exc, prefix="error: "):
    """Return *prefix* concatenated with str(exc).

    Kept for backward compatibility: this used to try clever things with
    EnvironmentErrors, but nowadays str(exception) produces good messages.
    """
    return f"{prefix}{exc}"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Needed by 'split_quoted()' -- compiled lazily by _init_regex() on first use.
_wordchars_re = _squote_re = _dquote_re = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _init_regex():
    """Compile the module-level regexes used by split_quoted() (lazy init)."""
    global _wordchars_re, _squote_re, _dquote_re
    # A (possibly empty) run of characters that are not whitespace,
    # quotes, or backslash -- i.e. plain "word" characters.
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    # Singly- and doubly-quoted strings, allowing backslash escapes inside.
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def split_quoted(s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.  In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped.  The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character.  The quote
    characters are stripped from any quoted string.  Returns a list of
    words.
    """

    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination.  It was a little
    # bit of a brain-bender to get it working right, though...
    if _wordchars_re is None:
        _init_regex()

    s = s.strip()
    words = []
    pos = 0

    # Invariant: s[:pos] has already been processed (escapes stripped,
    # quotes removed); the next interesting character is at or after pos.
    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            # Rest of the string is one unbroken word.
            words.append(s[:end])
            break

        if s[end] in string.whitespace:
            # unescaped, unquoted whitespace: now
            # we definitely have a word delimiter
            words.append(s[:end])
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':
            # preserve whatever is being escaped;
            # will become part of the current word
            s = s[:end] + s[end + 1 :]
            pos = end + 1

        else:
            if s[end] == "'":  # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':  # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            # Remove the surrounding quote characters, keeping the quoted
            # content in place, and continue just past it.
            (beg, end) = m.span()
            s = s[:beg] + s[beg + 1 : end - 1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# split_quoted ()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def execute(func, args, msg=None, verbose=0, dry_run=0):
    """Perform an action that affects the outside world (e.g. writing to
    the filesystem), honoring the 'dry_run' flag.

    Logs *msg* -- derived from the function name and arguments when not
    supplied -- then calls ``func(*args)`` unless 'dry_run' is true.
    """
    if msg is None:
        msg = f"{func.__name__}{args!r}"
        if msg.endswith(',)'):  # correct for singleton tuple
            msg = msg[:-2] + ')'

    log.info(msg)
    if not dry_run:
        func(*args)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def strtobool(val):
    """Convert a string representation of truth to 1 (true) or 0 (false).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    truthy = {'y', 'yes', 't', 'true', 'on', '1'}
    falsy = {'n', 'no', 'f', 'false', 'off', '0'}
    val = val.lower()
    if val in truthy:
        return 1
    if val in falsy:
        return 0
    raise ValueError("invalid truth value {!r}".format(val))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def byte_compile(  # noqa: C901
    py_files,
    optimize=0,
    force=0,
    prefix=None,
    base_dir=None,
    verbose=1,
    dry_run=0,
    direct=None,
):
    """Byte-compile a collection of Python source files to .pyc
    files in a __pycache__ subdirectory.  'py_files' is a list
    of files to compile; any files that don't end in ".py" are silently
    skipped.  'optimize' must be one of the following:
      0 - don't optimize
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.

    Raises DistutilsByteCompileError if byte-compilation is disabled via
    sys.dont_write_bytecode, and ValueError when 'prefix' does not match
    a filename.
    """

    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = __debug__ and optimize == 0

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp

            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp

            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            with script:
                script.write(
                    """\
from distutils.util import byte_compile
files = [
"""
                )

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it).  But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right".  This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write(
                    """
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
"""
                    % (optimize, force, prefix, base_dir, verbose)
                )

        # Run the generated script in a fresh interpreter carrying the
        # requested optimization flags, then clean the script up.
        cmd = [sys.executable]
        cmd.extend(subprocess._optim_args_from_interpreter_flags())
        cmd.append(script_name)
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if optimize >= 0:
                opt = '' if optimize == 0 else optimize
                cfile = importlib.util.cache_from_source(file, optimization=opt)
            else:
                cfile = importlib.util.cache_from_source(file)
            dfile = file
            if prefix:
                if file[: len(prefix)] != prefix:
                    raise ValueError(
                        "invalid prefix: filename %r doesn't start with %r"
                        % (file, prefix)
                    )
                dfile = dfile[len(prefix) :]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s", file, cfile_base)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def rfc822_escape(header):
    """Escape *header* for inclusion in an RFC-822 header.

    Each embedded newline is followed by eight spaces so continuation
    lines are recognized as part of the same header field.
    """
    continuation = '\n' + ' ' * 8
    return continuation.join(header.split('\n'))
 | 
			
		||||
@@ -0,0 +1,358 @@
 | 
			
		||||
#
 | 
			
		||||
# distutils/version.py
 | 
			
		||||
#
 | 
			
		||||
# Implements multiple version numbering conventions for the
 | 
			
		||||
# Python Module Distribution Utilities.
 | 
			
		||||
#
 | 
			
		||||
# $Id$
 | 
			
		||||
#
 | 
			
		||||
 | 
			
		||||
"""Provides classes to represent module version numbers (one class for
 | 
			
		||||
each style of version numbering).  There are currently two such classes
 | 
			
		||||
implemented: StrictVersion and LooseVersion.
 | 
			
		||||
 | 
			
		||||
Every version number class implements the following interface:
 | 
			
		||||
  * the 'parse' method takes a string and parses it to some internal
 | 
			
		||||
    representation; if the string is an invalid version number,
 | 
			
		||||
    'parse' raises a ValueError exception
 | 
			
		||||
  * the class constructor takes an optional string argument which,
 | 
			
		||||
    if supplied, is passed to 'parse'
 | 
			
		||||
  * __str__ reconstructs the string that was passed to 'parse' (or
 | 
			
		||||
    an equivalent string -- ie. one that will generate an equivalent
 | 
			
		||||
    version number instance)
 | 
			
		||||
  * __repr__ generates Python code to recreate the version number instance
 | 
			
		||||
  * _cmp compares the current instance with either another instance
 | 
			
		||||
    of the same class or a string (which will be parsed to an instance
 | 
			
		||||
    of the same class, thus must follow the same rules)
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import re
 | 
			
		||||
import warnings
 | 
			
		||||
import contextlib
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def suppress_known_deprecation():
    """Record, instead of surfacing, the known Version deprecation warning.

    Yields the list of recorded warnings so callers may inspect them.
    """
    with warnings.catch_warnings(record=True) as ctx:
        warnings.filterwarnings(
            'default',
            category=DeprecationWarning,
            message="distutils Version classes are deprecated.",
        )
        yield ctx
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Version:
    """Abstract base for version-numbering classes.

    Supplies the pieces every concrete version class shares: the
    constructor (which delegates to ``parse``), ``__repr__``, and the
    rich-comparison operators, all of which are routed through ``_cmp``.
    Subclasses must provide ``parse`` and ``_cmp``.
    """

    def __init__(self, vstring=None):
        # Parse first so an invalid string raises before we warn.
        if vstring:
            self.parse(vstring)
        warnings.warn(
            "distutils Version classes are deprecated. "
            "Use packaging.version instead.",
            DeprecationWarning,
            stacklevel=2,
        )

    def __repr__(self):
        return f"{self.__class__.__name__} ('{self}')"

    # Each operator defers to _cmp; NotImplemented is propagated so the
    # interpreter can try the reflected operation on the other operand.
    def __eq__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome == 0

    def __lt__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome < 0

    def __le__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome <= 0

    def __gt__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome > 0

    def __ge__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome >= 0
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Interface for version-number classes -- must be implemented
 | 
			
		||||
# by the following classes (the concrete ones -- Version should
 | 
			
		||||
# be treated as an abstract class).
 | 
			
		||||
#    __init__ (string) - create and take same action as 'parse'
 | 
			
		||||
#                        (string parameter is optional)
 | 
			
		||||
#    parse (string)    - convert a string representation to whatever
 | 
			
		||||
#                        internal representation is appropriate for
 | 
			
		||||
#                        this style of version numbering
 | 
			
		||||
#    __str__ (self)    - convert back to a string; should be very similar
 | 
			
		||||
#                        (if not identical to) the string supplied to parse
 | 
			
		||||
#    __repr__ (self)   - generate Python code to recreate
 | 
			
		||||
#                        the instance
 | 
			
		||||
#    _cmp (self, other) - compare two version numbers ('other' may
 | 
			
		||||
#                        be an unparsed version string, or another
 | 
			
		||||
#                        instance of your version class)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class StrictVersion(Version):

    """Version numbering for software idealists.

    A version number is two or three dot-separated numeric components
    with an optional pre-release tag: the letter 'a' or 'b' followed by
    a number (e.g. ``0.4``, ``0.4.1``, ``0.5a1``, ``1.0.4b1``).  When
    the numeric components are equal, a version with a pre-release tag
    always compares as earlier (lesser) than one without.

    Examples of invalid version numbers: ``1``, ``2.7.2.2``, ``1.3.a4``,
    ``1.3pl1``, ``1.3c4``.
    """

    version_re = re.compile(
        r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE | re.ASCII
    )

    def parse(self, vstring):
        """Parse *vstring* into ``self.version`` (a 3-tuple of ints) and
        ``self.prerelease`` (``(letter, number)`` or ``None``).

        Raises ValueError when the string does not match ``version_re``.
        """
        match = self.version_re.match(vstring)
        if match is None:
            raise ValueError("invalid version number '%s'" % vstring)

        major, minor, patch, prerelease, prerelease_num = match.group(1, 2, 4, 5, 6)

        # A missing patch component is normalized to zero.
        numeric = [major, minor, patch] if patch else [major, minor, '0']
        self.version = tuple(int(part) for part in numeric)

        self.prerelease = (
            (prerelease[0], int(prerelease_num)) if prerelease else None
        )

    def __str__(self):
        # A zero patch level is omitted from the rendered string.
        parts = self.version if self.version[2] != 0 else self.version[0:2]
        vstring = '.'.join(str(part) for part in parts)

        if self.prerelease:
            vstring += self.prerelease[0] + str(self.prerelease[1])

        return vstring

    def _cmp(self, other):  # noqa: C901
        """Three-way compare against another StrictVersion (or string)."""
        if isinstance(other, str):
            with suppress_known_deprecation():
                other = StrictVersion(other)
        elif not isinstance(other, StrictVersion):
            return NotImplemented

        # Different numeric parts decide the ordering outright; the
        # pre-release tag is irrelevant in that case.
        if self.version != other.version:
            return -1 if self.version < other.version else 1

        # Numeric parts are equal: a pre-release tag sorts before no tag,
        # and two tags compare as (letter, number) tuples.
        if not self.prerelease and not other.prerelease:
            return 0
        if self.prerelease and not other.prerelease:
            return -1
        if not self.prerelease and other.prerelease:
            return 1
        if self.prerelease == other.prerelease:
            return 0
        return -1 if self.prerelease < other.prerelease else 1
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# end class StrictVersion
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# The rules according to Greg Stein:
 | 
			
		||||
# 1) a version number has 1 or more numbers separated by a period or by
 | 
			
		||||
#    sequences of letters. If only periods, then these are compared
 | 
			
		||||
#    left-to-right to determine an ordering.
 | 
			
		||||
# 2) sequences of letters are part of the tuple for comparison and are
 | 
			
		||||
#    compared lexicographically
 | 
			
		||||
# 3) recognize the numeric components may have leading zeroes
 | 
			
		||||
#
 | 
			
		||||
# The LooseVersion class below implements these rules: a version number
 | 
			
		||||
# string is split up into a tuple of integer and string components, and
 | 
			
		||||
# comparison is a simple tuple comparison.  This means that version
 | 
			
		||||
# numbers behave in a predictable and obvious way, but a way that might
 | 
			
		||||
# not necessarily be how people *want* version numbers to behave.  There
 | 
			
		||||
# wouldn't be a problem if people could stick to purely numeric version
 | 
			
		||||
# numbers: just split on period and compare the numbers as tuples.
 | 
			
		||||
# However, people insist on putting letters into their version numbers;
 | 
			
		||||
# the most common purpose seems to be:
 | 
			
		||||
#   - indicating a "pre-release" version
 | 
			
		||||
#     ('alpha', 'beta', 'a', 'b', 'pre', 'p')
 | 
			
		||||
#   - indicating a post-release patch ('p', 'pl', 'patch')
 | 
			
		||||
# but of course this can't cover all version number schemes, and there's
 | 
			
		||||
# no way to know what a programmer means without asking him.
 | 
			
		||||
#
 | 
			
		||||
# The problem is what to do with letters (and other non-numeric
 | 
			
		||||
# characters) in a version number.  The current implementation does the
 | 
			
		||||
# obvious and predictable thing: keep them as strings and compare
 | 
			
		||||
# lexically within a tuple comparison.  This has the desired effect if
 | 
			
		||||
# an appended letter sequence implies something "post-release":
 | 
			
		||||
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
 | 
			
		||||
#
 | 
			
		||||
# However, if letters in a version number imply a pre-release version,
 | 
			
		||||
# the "obvious" thing isn't correct.  Eg. you would expect that
 | 
			
		||||
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
 | 
			
		||||
# implemented here, this just isn't so.
 | 
			
		||||
#
 | 
			
		||||
# Two possible solutions come to mind.  The first is to tie the
 | 
			
		||||
# comparison algorithm to a particular set of semantic rules, as has
 | 
			
		||||
# been done in the StrictVersion class above.  This works great as long
 | 
			
		||||
# as everyone can go along with bondage and discipline.  Hopefully a
 | 
			
		||||
# (large) subset of Python module programmers will agree that the
 | 
			
		||||
# particular flavour of bondage and discipline provided by StrictVersion
 | 
			
		||||
# provides enough benefit to be worth using, and will submit their
 | 
			
		||||
# version numbering scheme to its domination.  The free-thinking
 | 
			
		||||
# anarchists in the lot will never give in, though, and something needs
 | 
			
		||||
# to be done to accommodate them.
 | 
			
		||||
#
 | 
			
		||||
# Perhaps a "moderately strict" version class could be implemented that
 | 
			
		||||
# lets almost anything slide (syntactically), and makes some heuristic
 | 
			
		||||
# assumptions about non-digits in version number strings.  This could
 | 
			
		||||
# sink into special-case-hell, though; if I was as talented and
 | 
			
		||||
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
 | 
			
		||||
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
 | 
			
		||||
# just as happy dealing with things like "2g6" and "1.13++".  I don't
 | 
			
		||||
# think I'm smart enough to do it right though.
 | 
			
		||||
#
 | 
			
		||||
# In any case, I've coded the test suite for this module (see
 | 
			
		||||
# ../test/test_version.py) specifically to fail on things like comparing
 | 
			
		||||
# "1.2a2" and "1.2".  That's not because the *code* is doing anything
 | 
			
		||||
# wrong, it's because the simple, obvious design doesn't match my
 | 
			
		||||
# complicated, hairy expectations for real-world version numbers.  It
 | 
			
		||||
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
 | 
			
		||||
# the Right Thing" (ie. the code matches the conception).  But I'd rather
 | 
			
		||||
# have a conception that matches common notions about version numbers.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LooseVersion(Version):

    """Version numbering for anarchists and software realists.

    A version number is a series of numbers separated by periods or
    runs of letters; numeric components compare numerically and
    alphabetic components compare lexically.  Valid examples include
    ``1.5.1``, ``1.5.2b2``, ``161``, ``3.10a``, ``1996.07.12``,
    ``2.2beta29``, ``1.13++``, ``2.0b1pl0``.

    There is no such thing as an invalid version number under this
    scheme; comparisons are simple and predictable, but may not always
    give the results you want.
    """

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def parse(self, vstring):
        # The original string cannot reliably be reconstructed from the
        # parsed components, so keep it verbatim for __str__.
        self.vstring = vstring
        pieces = [
            piece
            for piece in self.component_re.split(vstring)
            if piece and piece != '.'
        ]

        def _coerce(piece):
            # Numeric runs become ints; letter runs stay strings.
            try:
                return int(piece)
            except ValueError:
                return piece

        self.version = [_coerce(piece) for piece in pieces]

    def __str__(self):
        return self.vstring

    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)

    def _cmp(self, other):
        """Three-way compare via plain list comparison of components."""
        if isinstance(other, str):
            other = LooseVersion(other)
        elif not isinstance(other, LooseVersion):
            return NotImplemented

        if self.version == other.version:
            return 0
        if self.version < other.version:
            return -1
        if self.version > other.version:
            return 1
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# end class LooseVersion
 | 
			
		||||
@@ -0,0 +1,175 @@
 | 
			
		||||
"""Module for parsing and testing package version predicate strings.
 | 
			
		||||
"""
 | 
			
		||||
import re
 | 
			
		||||
from . import version
 | 
			
		||||
import operator
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Regular expressions used to split a predicate such as
# "mypkg.subpkg (>1.0, !=1.5)" into its parts.
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII)
# (package) (rest)

re_paren = re.compile(r"^\s*\((.*)\)\s*$")  # (list) inside of parentheses
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)
			
		||||
 | 
			
		||||
 | 
			
		||||
def splitUp(pred):
    """Parse a single version comparison such as ``">= 1.0"``.

    Return a ``(comparison string, StrictVersion)`` pair; raise
    ValueError when *pred* does not match ``re_splitComparison``.
    """
    match = re_splitComparison.match(pred)
    if match is None:
        raise ValueError("bad package restriction syntax: %r" % pred)
    op, version_text = match.groups()
    # StrictVersion construction emits a DeprecationWarning; keep it quiet.
    with version.suppress_known_deprecation():
        parsed = version.StrictVersion(version_text)
    return (op, parsed)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Map each comparison token produced by ``splitUp`` to the matching
# rich-comparison function from the ``operator`` module.
compmap = {
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    ">": operator.gt,
    ">=": operator.ge,
    "!=": operator.ne,
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class VersionPredicate:
    """Parse and test package version predicates.

    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')

    The `name` attribute provides the full dotted name that is given::

    >>> v.name
    'pyepat.abc'

    The str() of a `VersionPredicate` provides a normalized
    human-readable version of the expression::

    >>> print(v)
    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)

    The `satisfied_by()` method can be used to determine whether a given
    version number is included in the set described by the version
    restrictions::

    >>> v.satisfied_by('1.1')
    True
    >>> v.satisfied_by('1.4')
    True
    >>> v.satisfied_by('1.0')
    False
    >>> v.satisfied_by('4444.4')
    False
    >>> v.satisfied_by('1555.1b3')
    False

    `VersionPredicate` is flexible in accepting extra whitespace::

    >>> v = VersionPredicate(' pat( ==  0.1  )  ')
    >>> v.name
    'pat'
    >>> v.satisfied_by('0.1')
    True
    >>> v.satisfied_by('0.2')
    False

    If any version numbers passed in do not conform to the
    restrictions of `StrictVersion`, a `ValueError` is raised::

    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
    Traceback (most recent call last):
      ...
    ValueError: invalid version number '1.2zb3'

    If the module or package name given does not conform to what's
    allowed as a legal module or package name, `ValueError` is
    raised::

    >>> v = VersionPredicate('foo-bar')
    Traceback (most recent call last):
      ...
    ValueError: expected parenthesized list: '-bar'

    >>> v = VersionPredicate('foo bar (12.21)')
    Traceback (most recent call last):
      ...
    ValueError: expected parenthesized list: 'bar (12.21)'

    """

    def __init__(self, versionPredicateStr):
        """Parse a version predicate string.

        Raises ValueError on an empty string, an illegal package name,
        a missing/empty parenthesized list, or an invalid version.
        """
        # Fields:
        #    name:  package name
        #    pred:  list of (comparison string, StrictVersion)

        versionPredicateStr = versionPredicateStr.strip()
        if not versionPredicateStr:
            raise ValueError("empty package restriction")
        match = re_validPackage.match(versionPredicateStr)
        if not match:
            raise ValueError("bad package name in %r" % versionPredicateStr)
        self.name, paren = match.groups()
        paren = paren.strip()
        if paren:
            match = re_paren.match(paren)
            if not match:
                raise ValueError("expected parenthesized list: %r" % paren)
            # Renamed from ``str`` to avoid shadowing the builtin.
            predicate_text = match.groups()[0]
            self.pred = [splitUp(aPred) for aPred in predicate_text.split(",")]
            if not self.pred:
                raise ValueError("empty parenthesized list in %r" % versionPredicateStr)
        else:
            self.pred = []

    def __str__(self):
        """Return the normalized form, e.g. ``pkg (> 1.0, != 1.5)``."""
        if self.pred:
            seq = [cond + " " + str(ver) for cond, ver in self.pred]
            return self.name + " (" + ", ".join(seq) + ")"
        else:
            return self.name

    def satisfied_by(self, version):
        """True if version is compatible with all the predicates in self.

        The parameter version must be acceptable to the StrictVersion
        constructor.  It may be either a string or StrictVersion.
        """
        for cond, ver in self.pred:
            if not compmap[cond](version, ver):
                return False
        return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_provision_rx = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def split_provision(value):
    """Return the name and optional version number of a provision.

    The version number, if given, will be returned as a `StrictVersion`
    instance, otherwise it will be `None`.

    >>> split_provision('mypkg')
    ('mypkg', None)
    >>> split_provision(' mypkg( 1.2 ) ')
    ('mypkg', StrictVersion ('1.2'))
    """
    global _provision_rx
    if _provision_rx is None:
        # Compile lazily on first use; cached in the module-level global.
        _provision_rx = re.compile(
            r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII
        )
    stripped = value.strip()
    m = _provision_rx.match(stripped)
    if m is None:
        raise ValueError("illegal provides specification: %r" % stripped)
    ver = m.group(2) or None
    if ver is not None:
        with version.suppress_known_deprecation():
            ver = version.StrictVersion(ver)
    return m.group(1), ver
 | 
			
		||||
@@ -0,0 +1,94 @@
 | 
			
		||||
import functools
 | 
			
		||||
import operator
 | 
			
		||||
import itertools
 | 
			
		||||
 | 
			
		||||
from .errors import OptionError
 | 
			
		||||
from .extern.jaraco.text import yield_lines
 | 
			
		||||
from .extern.jaraco.functools import pass_none
 | 
			
		||||
from ._importlib import metadata
 | 
			
		||||
from ._itertools import ensure_unique
 | 
			
		||||
from .extern.more_itertools import consume
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def ensure_valid(ep):
    """
    Touch one of the entry point's dynamic properties so that a
    malformed specification is detected (the attribute access triggers
    the underlying pattern match); re-raise as OptionError.
    """
    try:
        getattr(ep, 'extras')
    except AttributeError as ex:
        failure_hint = (
            f"Problems to parse {ep}.\nPlease ensure entry-point follows the spec: "
            "https://packaging.python.org/en/latest/specifications/entry-points/"
        )
        raise OptionError(failure_hint) from ex
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def load_group(value, group):
    """
    Given a value of an entry point or series of entry points,
    return each as an EntryPoint.
    """
    # Normalize the value (string or iterable of lines) into INI-style
    # text under a single [group] section, then let metadata parse it.
    body = '\n'.join(yield_lines(value))
    config = f'[{group}]\n' + body
    return metadata.EntryPoints._from_text(config)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def by_group_and_name(ep):
    """Uniqueness/sort key for an entry point: its (group, name) pair."""
    return (ep.group, ep.name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def validate(eps: metadata.EntryPoints):
    """
    Ensure entry points are unique by group and name and validate each.
    """
    # ensure_unique raises on a duplicate (group, name); ensure_valid
    # raises on a malformed entry point.
    for ep in ensure_unique(eps, key=by_group_and_name):
        ensure_valid(ep)
    return eps
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.singledispatch
def load(eps):
    """
    Given a Distribution.entry_points, produce EntryPoints.
    """
    per_group = (load_group(value, group) for group, value in eps.items())
    flattened = itertools.chain.from_iterable(per_group)
    return validate(metadata.EntryPoints(flattened))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@load.register(str)
def _(eps):
    r"""
    >>> ep, = load('[console_scripts]\nfoo=bar')
    >>> ep.group
    'console_scripts'
    >>> ep.name
    'foo'
    >>> ep.value
    'bar'
    """
    # The string is already INI-style text; parse then validate.
    parsed = metadata.EntryPoints._from_text(eps)
    return validate(metadata.EntryPoints(parsed))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# A None specification passes through unchanged (no entry points defined).
load.register(type(None), lambda x: x)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@pass_none
def render(eps: metadata.EntryPoints):
    """Render entry points as INI-style text, one section per group.

    Returns None unchanged when *eps* is None (via ``pass_none``).
    """
    key = operator.attrgetter('group')
    # groupby requires its input sorted by the same key.
    grouped = itertools.groupby(sorted(eps, key=key), key)

    sections = (
        f'[{group}]\n{render_items(items)}\n'
        for group, items in grouped
    )
    return '\n'.join(sections)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def render_items(eps):
    """Render each entry point as ``name = value``, sorted and newline-joined."""
    rendered = (f'{ep.name} = {ep.value}' for ep in sorted(eps))
    return '\n'.join(rendered)
 | 
			
		||||
							
								
								
									
										82
									
								
								teil20/lib/python3.11/site-packages/setuptools/_imp.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										82
									
								
								teil20/lib/python3.11/site-packages/setuptools/_imp.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,82 @@
 | 
			
		||||
"""
 | 
			
		||||
Re-implementation of find_module and get_frozen_object
 | 
			
		||||
from the deprecated imp module.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import importlib.util
 | 
			
		||||
import importlib.machinery
 | 
			
		||||
 | 
			
		||||
from .py34compat import module_from_spec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Module-kind constants mirroring the values from the removed ``imp``
# module, kept for callers that still expect imp-style return values.
PY_SOURCE = 1
PY_COMPILED = 2
C_EXTENSION = 3
C_BUILTIN = 6
PY_FROZEN = 7
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def find_spec(module, paths):
    """Locate *module* and return its spec (or None).

    When *paths* is an explicit list, search only those directories via
    PathFinder; otherwise defer to the regular import machinery.
    """
    if isinstance(paths, list):
        finder = importlib.machinery.PathFinder().find_spec
    else:
        finder = importlib.util.find_spec
    return finder(module, paths)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support

    Returns ``(file, path, (suffix, mode, kind))`` where ``kind`` is one of
    the PY_SOURCE/PY_COMPILED/C_EXTENSION/C_BUILTIN/PY_FROZEN constants.
    For source/bytecode modules ``file`` is an open handle the caller must
    close, matching the old imp contract.
    """
    spec = find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
        # Package/namespace spec without a concrete file: synthesize a
        # loader-based spec so the branches below see a uniform object.
        spec = importlib.util.spec_from_loader('__init__.py', spec.loader)

    kind = -1
    file = None
    # Loaders for builtin/frozen modules are classes, not instances.
    static = isinstance(spec.loader, type)
    if spec.origin == 'frozen' or static and issubclass(
            spec.loader, importlib.machinery.FrozenImporter):
        kind = PY_FROZEN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.origin == 'built-in' or static and issubclass(
            spec.loader, importlib.machinery.BuiltinImporter):
        kind = C_BUILTIN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.has_location:
        path = spec.origin
        suffix = os.path.splitext(path)[1]
        mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'

        if suffix in importlib.machinery.SOURCE_SUFFIXES:
            kind = PY_SOURCE
        elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
            kind = PY_COMPILED
        elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
            kind = C_EXTENSION

        if kind in {PY_SOURCE, PY_COMPILED}:
            # Caller is responsible for closing this handle (imp contract).
            file = open(path, mode)
    else:
        path = None
        suffix = mode = ''

    return file, path, (suffix, mode, kind)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_frozen_object(module, paths=None):
    """Return the code object for *module* (replacement for
    ``imp.get_frozen_object``)."""
    spec = find_spec(module, paths)
    if spec:
        return spec.loader.get_code(module)
    raise ImportError("Can't find %s" % module)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_module(module, paths, info):
    """Import and return *module*.

    ``info`` is accepted only for signature parity with the old imp-based
    API; it is not consulted here.
    """
    spec = find_spec(module, paths)
    if spec:
        return module_from_spec(spec)
    raise ImportError("Can't find %s" % module)
 | 
			
		||||
							
								
								
									
										47
									
								
								teil20/lib/python3.11/site-packages/setuptools/_importlib.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								teil20/lib/python3.11/site-packages/setuptools/_importlib.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,47 @@
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def disable_importlib_metadata_finder(metadata):
    """
    Ensure importlib_metadata doesn't provide older, incompatible
    Distributions.

    Workaround for #3102.
    """
    try:
        import importlib_metadata
    except ImportError:
        # Backport not installed: nothing to disable.
        return
    except AttributeError:
        import warnings

        msg = (
            "`importlib-metadata` version is incompatible with `setuptools`.\n"
            "This problem is likely to be solved by installing an updated version of "
            "`importlib-metadata`."
        )
        # Surface a descriptive message before re-raising; the exception
        # itself may be suppressed by _distutils_hack.
        warnings.warn(msg)
        raise

    if importlib_metadata is metadata:
        return
    stale_finders = [
        finder
        for finder in sys.meta_path
        if isinstance(finder, importlib_metadata.MetadataPathFinder)
    ]
    for finder in stale_finders:
        sys.meta_path.remove(finder)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Expose a uniform ``metadata`` / ``resources`` API across interpreter
# versions: older Pythons use the vendored backports, newer ones the stdlib.
if sys.version_info < (3, 10):
    from setuptools.extern import importlib_metadata as metadata
    disable_importlib_metadata_finder(metadata)
else:
    import importlib.metadata as metadata  # noqa: F401


if sys.version_info < (3, 9):
    from setuptools.extern import importlib_resources as resources
else:
    import importlib.resources as resources  # noqa: F401
 | 
			
		||||
							
								
								
									
										23
									
								
								teil20/lib/python3.11/site-packages/setuptools/_itertools.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								teil20/lib/python3.11/site-packages/setuptools/_itertools.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,23 @@
 | 
			
		||||
from setuptools.extern.more_itertools import consume  # noqa: F401
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# copied from jaraco.itertools 6.1
 | 
			
		||||
def ensure_unique(iterable, key=lambda x: x):
    """
    Wrap an iterable to raise a ValueError if non-unique values are encountered.

    >>> list(ensure_unique('abc'))
    ['a', 'b', 'c']
    >>> consume(ensure_unique('abca'))
    Traceback (most recent call last):
    ...
    ValueError: Duplicate element 'a' encountered.
    """
    observed = set()
    for element in iterable:
        marker = key(element)
        if marker in observed:
            raise ValueError(f"Duplicate element {element!r} encountered.")
        observed.add(marker)
        yield element
 | 
			
		||||
							
								
								
									
										29
									
								
								teil20/lib/python3.11/site-packages/setuptools/_path.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								teil20/lib/python3.11/site-packages/setuptools/_path.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,29 @@
 | 
			
		||||
import os
 | 
			
		||||
from typing import Union
 | 
			
		||||
 | 
			
		||||
_Path = Union[str, os.PathLike]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Creates any missing intermediate directories; already-existing
    directories are not an error.
    """
    parent = os.path.dirname(path)
    os.makedirs(parent, exist_ok=True)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def same_path(p1: _Path, p2: _Path) -> bool:
    """Differs from os.path.samefile because it does not require paths to exist.

    Purely lexical comparison (no filesystem access, no i-node comparison):
    both paths are normalized and the resulting strings compared.

    >>> same_path("a/b", "./a/b")
    True
    >>> same_path("a/b", "./a/b/c/..")
    True
    >>> same_path("a/b", "../a/b/c")
    False
    >>> same_path("a", "a/b")
    False
    """
    normalized = {os.path.normpath(p) for p in (p1, p2)}
    return len(normalized) == 1
 | 
			
		||||
							
								
								
									
										19
									
								
								teil20/lib/python3.11/site-packages/setuptools/_reqs.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								teil20/lib/python3.11/site-packages/setuptools/_reqs.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,19 @@
 | 
			
		||||
import setuptools.extern.jaraco.text as text
 | 
			
		||||
 | 
			
		||||
from pkg_resources import Requirement
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_strings(strs):
    """
    Yield requirement strings for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    lines = text.yield_lines(strs)
    without_comments = map(text.drop_comment, lines)
    return text.join_continuation(without_comments)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse(strs):
    """
    Deprecated drop-in replacement for pkg_resources.parse_requirements.

    Returns an iterator of ``Requirement`` objects built from the
    requirement strings yielded by :func:`parse_strings`.
    """
    return map(Requirement, parse_strings(strs))
 | 
			
		||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							@@ -0,0 +1,68 @@
 | 
			
		||||
import re
 | 
			
		||||
import textwrap
 | 
			
		||||
import email.message
 | 
			
		||||
 | 
			
		||||
from ._text import FoldedCase
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Message(email.message.Message):
    """An email Message specialized for package metadata (PEP 566)."""

    # Header keys that may legitimately appear more than once.
    multiple_use_keys = set(
        map(
            FoldedCase,
            [
                'Classifier',
                'Obsoletes-Dist',
                'Platform',
                'Project-URL',
                'Provides-Dist',
                'Provides-Extra',
                'Requires-Dist',
                'Requires-External',
                'Supported-Platform',
                'Dynamic',
            ],
        )
    )
    """
    Keys that may be indicated multiple times per PEP 566.
    """

    def __new__(cls, orig: email.message.Message):
        # Re-wrap an already-parsed Message by copying its instance state
        # wholesale instead of re-parsing the source text.
        res = super().__new__(cls)
        vars(res).update(vars(orig))
        return res

    def __init__(self, *args, **kwargs):
        # Deliberately does not call super().__init__ -- the state was
        # copied in __new__; only the headers need normalizing here.
        self._headers = self._repair_headers()

    # suppress spurious error from mypy
    def __iter__(self):
        return super().__iter__()

    def _repair_headers(self):
        # Undo the RFC822 continuation-line indentation applied to
        # multi-line header values, and surface the message body as a
        # 'Description' pseudo-header.
        def redent(value):
            "Correct for RFC822 indentation"
            if not value or '\n' not in value:
                return value
            return textwrap.dedent(' ' * 8 + value)

        headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
        if self._payload:
            headers.append(('Description', self.get_payload()))
        return headers

    @property
    def json(self):
        """
        Convert PackageMetadata to a JSON-compatible format
        per PEP 0566.
        """

        def transform(key):
            # Multi-use keys become lists; 'Keywords' is split on whitespace.
            value = self.get_all(key) if key in self.multiple_use_keys else self[key]
            if key == 'Keywords':
                value = re.split(r'\s+', value)
            tk = key.lower().replace('-', '_')
            return tk, value

        return dict(map(transform, map(FoldedCase, self)))
 | 
			
		||||
@@ -0,0 +1,30 @@
 | 
			
		||||
import collections
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.collections 3.3
 | 
			
		||||
class FreezableDefaultDict(collections.defaultdict):
    """
    A defaultdict whose default-insertion behavior can be switched off.

    After ``freeze()``, missing keys still yield a fresh default value but
    are no longer inserted, so size and iteration order stay stable (e.g.
    during iteration).

    >>> dd = FreezableDefaultDict(list)
    >>> dd[0].append('1')
    >>> dd.freeze()
    >>> dd[1]
    []
    >>> len(dd)
    1
    """

    def __missing__(self, key):
        # Pre-freeze: defer to defaultdict (insert + return the default).
        # Post-freeze: hand back a fresh default without inserting it.
        frozen = getattr(self, '_frozen', None)
        if frozen is None:
            return super().__missing__(key)
        return frozen(key)

    def freeze(self):
        self._frozen = lambda key: self.default_factory()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Pair(collections.namedtuple('Pair', 'name value')):
    """A ``(name, value)`` pair parsed from a ``name = value`` string."""

    @classmethod
    def parse(cls, text):
        # Split on the first '=' only; whitespace around each half is noise.
        parts = text.split("=", 1)
        return cls(*(part.strip() for part in parts))
 | 
			
		||||
@@ -0,0 +1,71 @@
 | 
			
		||||
import sys
 | 
			
		||||
import platform
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
__all__ = ['install', 'NullFinder', 'Protocol']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    from typing import Protocol
 | 
			
		||||
except ImportError:  # pragma: no cover
 | 
			
		||||
    from ..typing_extensions import Protocol  # type: ignore
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def install(cls):
    """
    Class decorator for installation on sys.meta_path.

    Appends an instance of the decorated DistributionFinder to
    sys.meta_path, then attempts to disable the stdlib finder so the
    backport takes precedence. Returns the class unchanged.
    """
    instance = cls()
    sys.meta_path.append(instance)
    disable_stdlib_finder()
    return cls
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def disable_stdlib_finder():
    """
    Give the backport primacy for discovering path-based distributions
    by monkey-patching the stdlib O_O.

    See #91 for more background for rationale on this sketchy
    behavior.
    """

    def _is_stdlib_path_finder(finder):
        return (
            getattr(finder, '__module__', None) == '_frozen_importlib_external'
            and hasattr(finder, 'find_distributions')
        )

    for finder in filter(_is_stdlib_path_finder, sys.meta_path):  # pragma: nocover
        del finder.find_distributions
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class NullFinder:
    """
    A "Finder" (aka "MetaClassFinder") that never finds any modules,
    but may find distributions.
    """

    @staticmethod
    def find_spec(*args, **kwargs):
        return None

    # Python 2's import system required finders to expose find_module();
    # Python 3 prefers find_spec(). This finder exists only to sit on
    # sys.meta_path, so the two can be one and the same.
    find_module = find_spec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def pypy_partial(val):
    """
    Adjust for variable stacklevel on partial under PyPy.

    Workaround for #327: returns ``val`` unchanged on CPython and
    ``val + 1`` on PyPy.
    """
    offset = 1 if platform.python_implementation() == 'PyPy' else 0
    return val + offset
 | 
			
		||||
@@ -0,0 +1,104 @@
 | 
			
		||||
import types
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.functools 3.3
 | 
			
		||||
def method_cache(method, cache_wrapper=None):
    """
    Wrap lru_cache to support storing the cache data in the object instances.

    On the first call, a cached bound method is installed on the instance
    under the method's own name, shadowing this wrapper; every later call
    hits that per-instance cache directly. Instances therefore do not
    share cache entries, and an instance's cache is released together
    with the instance.

    ``cache_wrapper`` defaults to ``functools.lru_cache()`` but any
    compatible cache decorator may be supplied.

    ``wrapper.cache_clear()`` is supported even before the first call
    (as a no-op placeholder).

    Caution: do not stack another decorator (such as ``@property``) on
    top of the result; that changes the semantics of the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """
    cache_wrapper = cache_wrapper or functools.lru_cache()

    def wrapper(self, *args, **kwargs):
        # First call for this instance: build a cached bound method and
        # shadow the class attribute so later lookups bypass this wrapper.
        bound = types.MethodType(method, self)
        cached = cache_wrapper(bound)
        setattr(self, method.__name__, cached)
        return cached(*args, **kwargs)

    # Allow cache_clear before any per-instance cache exists.
    wrapper.cache_clear = lambda: None

    return wrapper
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# From jaraco.functools 3.3
 | 
			
		||||
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(first, *args, **kwargs):
        if first is None:
            return None
        return func(first, *args, **kwargs)

    return wrapper
 | 
			
		||||
@@ -0,0 +1,73 @@
 | 
			
		||||
from itertools import filterfalse
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def unique_everseen(iterable, key=None):
    """Yield unique elements in order, remembering every element seen.

    unique_everseen('AAAABBBCCDAABBB') --> A B C D
    unique_everseen('ABBCcAD', str.lower) --> A B C D
    """
    seen = set()
    for element in iterable:
        marker = element if key is None else key(element)
        if marker not in seen:
            seen.add(marker)
            yield element
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# copied from more_itertools 8.8
 | 
			
		||||
# copied from more_itertools 8.8
def always_iterable(obj, base_type=(str, bytes)):
    """Return an iterator over *obj*'s items, treating some types atomically.

    - ``None`` yields an empty iterator.
    - Instances of *base_type* (by default ``str``/``bytes``) are treated
      as single items even though they are iterable; pass
      ``base_type=None`` to disable this.
    - Non-iterable objects yield a one-item iterator containing *obj*.

    >>> list(always_iterable((1, 2, 3)))
    [1, 2, 3]
    >>> list(always_iterable(1))
    [1]
    >>> list(always_iterable(None))
    []
    >>> list(always_iterable('foo'))
    ['foo']
    >>> list(always_iterable('foo', base_type=None))
    ['f', 'o', 'o']
    """
    if obj is None:
        return iter(())

    treat_atomically = base_type is not None and isinstance(obj, base_type)
    if not treat_atomically:
        try:
            return iter(obj)
        except TypeError:
            pass
    return iter((obj,))
 | 
			
		||||
@@ -0,0 +1,48 @@
 | 
			
		||||
from ._compat import Protocol
 | 
			
		||||
from typing import Any, Dict, Iterator, List, TypeVar, Union
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_T = TypeVar("_T")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PackageMetadata(Protocol):
    """Structural protocol for a distribution's metadata mapping
    (sized, iterable, subscriptable by header key)."""

    def __len__(self) -> int:
        ...  # pragma: no cover

    def __contains__(self, item: str) -> bool:
        ...  # pragma: no cover

    def __getitem__(self, key: str) -> str:
        ...  # pragma: no cover

    def __iter__(self) -> Iterator[str]:
        ...  # pragma: no cover

    def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
        """
        Return all values associated with a possibly multi-valued key.
        """

    @property
    def json(self) -> Dict[str, Union[str, List[str]]]:
        """
        A JSON-compatible form of the metadata.
        """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SimplePath(Protocol):
    """
    A minimal subset of pathlib.Path required by PathDistribution.
    """

    def joinpath(self) -> 'SimplePath':
        ...  # pragma: no cover

    def __truediv__(self) -> 'SimplePath':
        ...  # pragma: no cover

    # NOTE(review): declared as a method here, although pathlib.Path exposes
    # ``parent`` as a property -- confirm this matches consumers' usage.
    def parent(self) -> 'SimplePath':
        ...  # pragma: no cover

    def read_text(self) -> str:
        ...  # pragma: no cover
 | 
			
		||||
@@ -0,0 +1,99 @@
 | 
			
		||||
import re
 | 
			
		||||
 | 
			
		||||
from ._functools import method_cache
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# from jaraco.text 3.5
 | 
			
		||||
class FoldedCase(str):
    """
    A case-insensitive ``str`` subclass.

    Ordering, equality, hashing, containment, ``index`` and ``split`` all
    operate on the lower-cased text, so values differing only in case
    compare equal and hash alike:

    >>> s = FoldedCase('hello world')
    >>> s == 'Hello World'
    True
    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Plain string containment works only with the FoldedCase instance on
    the right (``'hello' in FoldedCase('Hello World')``); to test the
    other direction, use :meth:`in_`:

    >>> FoldedCase('hello').in_('Hello World')
    True
    """

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        return hash(self.lower())

    def __contains__(self, other):
        return other.lower() in super().lower()

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        matcher = re.compile(re.escape(splitter), re.I)
        return matcher.split(self, maxsplit)
 | 
			
		||||
@@ -0,0 +1,36 @@
 | 
			
		||||
"""Read resources contained within a package."""
 | 
			
		||||
 | 
			
		||||
from ._common import (
 | 
			
		||||
    as_file,
 | 
			
		||||
    files,
 | 
			
		||||
    Package,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from ._legacy import (
 | 
			
		||||
    contents,
 | 
			
		||||
    open_binary,
 | 
			
		||||
    read_binary,
 | 
			
		||||
    open_text,
 | 
			
		||||
    read_text,
 | 
			
		||||
    is_resource,
 | 
			
		||||
    path,
 | 
			
		||||
    Resource,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from .abc import ResourceReader
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
__all__ = [
 | 
			
		||||
    'Package',
 | 
			
		||||
    'Resource',
 | 
			
		||||
    'ResourceReader',
 | 
			
		||||
    'as_file',
 | 
			
		||||
    'contents',
 | 
			
		||||
    'files',
 | 
			
		||||
    'is_resource',
 | 
			
		||||
    'open_binary',
 | 
			
		||||
    'open_text',
 | 
			
		||||
    'path',
 | 
			
		||||
    'read_binary',
 | 
			
		||||
    'read_text',
 | 
			
		||||
]
 | 
			
		||||
@@ -0,0 +1,170 @@
 | 
			
		||||
from contextlib import suppress
 | 
			
		||||
from io import TextIOWrapper
 | 
			
		||||
 | 
			
		||||
from . import abc
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SpecLoaderAdapter:
    """
    Adapt a package spec to adapt the underlying loader.

    Wraps *spec*, replacing its ``loader`` with ``adapter(spec)`` while
    proxying every other attribute to the original spec.
    """

    def __init__(self, spec, adapter=lambda spec: spec.loader):
        self.spec = spec
        self.loader = adapter(spec)

    def __getattr__(self, name):
        # Anything not overridden here is served by the wrapped spec.
        return getattr(self.spec, name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TraversableResourcesLoader:
    """
    Adapt a loader to provide TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    def get_resource_reader(self, name):
        # _native() returns the spec's own reader when it already supports
        # files(); otherwise the CompatibilityFiles adapter itself is the
        # reader.
        return CompatibilityFiles(self.spec)._native()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _io_wrapper(file, mode='r', *args, **kwargs):
 | 
			
		||||
    if mode == 'r':
 | 
			
		||||
        return TextIOWrapper(file, *args, **kwargs)
 | 
			
		||||
    elif mode == 'rb':
 | 
			
		||||
        return file
 | 
			
		||||
    raise ValueError(
 | 
			
		||||
        "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CompatibilityFiles:
    """
    Adapter for an existing or non-existent resource reader
    to provide a compatibility .files().
    """

    class SpecPath(abc.Traversable):
        """
        Path tied to a module spec.
        Can be read and exposes the resource reader children.
        """

        def __init__(self, spec, reader):
            # `reader` may be None/falsy when the loader provides no
            # resource reader; the methods below degrade gracefully.
            self._spec = spec
            self._reader = reader

        def iterdir(self):
            if not self._reader:
                return iter(())
            return iter(
                CompatibilityFiles.ChildPath(self._reader, path)
                for path in self._reader.contents()
            )

        def is_file(self):
            return False

        # A spec path is neither a concrete file nor a concrete
        # directory: both predicates report False.
        is_dir = is_file

        def joinpath(self, other):
            if not self._reader:
                return CompatibilityFiles.OrphanPath(other)
            return CompatibilityFiles.ChildPath(self._reader, other)

        @property
        def name(self):
            return self._spec.name

        def open(self, mode='r', *args, **kwargs):
            # Passes None as the resource name; whether that succeeds
            # depends on the underlying reader implementation.
            return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)

    class ChildPath(abc.Traversable):
        """
        Path tied to a resource reader child.
        Can be read but doesn't expose any meaningful children.
        """

        def __init__(self, reader, name):
            self._reader = reader
            self._name = name

        def iterdir(self):
            return iter(())

        def is_file(self):
            return self._reader.is_resource(self.name)

        def is_dir(self):
            # Anything the reader does not consider a resource (file) is
            # treated as a directory.
            return not self.is_file()

        def joinpath(self, other):
            # Children of a child cannot be resolved through the reader;
            # they become orphans.
            return CompatibilityFiles.OrphanPath(self.name, other)

        @property
        def name(self):
            return self._name

        def open(self, mode='r', *args, **kwargs):
            return _io_wrapper(
                self._reader.open_resource(self.name), mode, *args, **kwargs
            )

    class OrphanPath(abc.Traversable):
        """
        Orphan path, not tied to a module spec or resource reader.
        Can't be read and doesn't expose any meaningful children.
        """

        def __init__(self, *path_parts):
            if len(path_parts) < 1:
                raise ValueError('Need at least one path part to construct a path')
            self._path = path_parts

        def iterdir(self):
            return iter(())

        def is_file(self):
            return False

        # Orphans are neither files nor directories.
        is_dir = is_file

        def joinpath(self, other):
            # Joining only accumulates parts; the result is still orphan.
            return CompatibilityFiles.OrphanPath(*self._path, other)

        @property
        def name(self):
            return self._path[-1]

        def open(self, mode='r', *args, **kwargs):
            raise FileNotFoundError("Can't open orphan path")

    def __init__(self, spec):
        self.spec = spec

    @property
    def _reader(self):
        # Returns None when the loader has no get_resource_reader
        # (AttributeError is suppressed and the function falls off the end).
        with suppress(AttributeError):
            return self.spec.loader.get_resource_reader(self.spec.name)

    def _native(self):
        """
        Return the native reader if it supports files().
        """
        reader = self._reader
        return reader if hasattr(reader, 'files') else self

    def __getattr__(self, attr):
        # Fall back to the underlying reader for any other attribute.
        return getattr(self._reader, attr)

    def files(self):
        return CompatibilityFiles.SpecPath(self.spec, self._reader)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.
    """
    spec = package.__spec__
    return SpecLoaderAdapter(spec, TraversableResourcesLoader)
 | 
			
		||||
@@ -0,0 +1,104 @@
 | 
			
		||||
import os
 | 
			
		||||
import pathlib
 | 
			
		||||
import tempfile
 | 
			
		||||
import functools
 | 
			
		||||
import contextlib
 | 
			
		||||
import types
 | 
			
		||||
import importlib
 | 
			
		||||
 | 
			
		||||
from typing import Union, Optional
 | 
			
		||||
from .abc import ResourceReader, Traversable
 | 
			
		||||
 | 
			
		||||
from ._compat import wrap_spec
 | 
			
		||||
 | 
			
		||||
Package = Union[types.ModuleType, str]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def files(package):
    # type: (Package) -> Traversable
    """
    Get a Traversable resource from a package
    """
    resolved = get_package(package)
    return from_package(resolved)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_resource_reader(package):
    # type: (types.ModuleType) -> Optional[ResourceReader]
    """
    Return the package's loader if it's a ResourceReader.
    """
    # An issubclass() check is deliberately avoided here: abc's
    # __subclasscheck__() hook creates a weak reference to the object,
    # and zipimport.zipimporter does not support weak references, which
    # would raise a TypeError.
    spec = package.__spec__
    getter = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
    if getter is None:
        return None
    return getter(spec.name)  # type: ignore
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def resolve(cand):
    # type: (Package) -> types.ModuleType
    """Return *cand* as a module object, importing it when given a name."""
    if isinstance(cand, types.ModuleType):
        return cand
    return importlib.import_module(cand)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_package(package):
    # type: (Package) -> types.ModuleType
    """Take a package name or module object and return the module.

    Raise an exception if the resolved module is not a package.
    """
    resolved = resolve(package)
    # Only packages carry submodule search locations.
    searchable = wrap_spec(resolved).submodule_search_locations
    if searchable is None:
        raise TypeError(f'{package!r} is not a package')
    return resolved
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def from_package(package):
    """
    Return a Traversable object for the given package.

    """
    adapted = wrap_spec(package)
    reader = adapted.loader.get_resource_reader(adapted.name)
    return reader.files()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
 | 
			
		||||
def _tempfile(reader, suffix=''):
 | 
			
		||||
    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
 | 
			
		||||
    # blocks due to the need to close the temporary file to work on Windows
 | 
			
		||||
    # properly.
 | 
			
		||||
    fd, raw_path = tempfile.mkstemp(suffix=suffix)
 | 
			
		||||
    try:
 | 
			
		||||
        try:
 | 
			
		||||
            os.write(fd, reader())
 | 
			
		||||
        finally:
 | 
			
		||||
            os.close(fd)
 | 
			
		||||
        del reader
 | 
			
		||||
        yield pathlib.Path(raw_path)
 | 
			
		||||
    finally:
 | 
			
		||||
        try:
 | 
			
		||||
            os.remove(raw_path)
 | 
			
		||||
        except FileNotFoundError:
 | 
			
		||||
            pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.singledispatch
def as_file(path):
    """
    Given a Traversable object, return that object as a
    path on the local file system in a context manager.
    """
    # Generic case: materialize the traversable's bytes into a named
    # temporary file for the duration of the context.
    return _tempfile(path.read_bytes, suffix=path.name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@as_file.register(pathlib.Path)
@contextlib.contextmanager
def _(path):
    """
    Degenerate behavior for pathlib.Path objects.
    """
    # Already a concrete file system path: no temporary copy needed.
    yield path
 | 
			
		||||
@@ -0,0 +1,98 @@
 | 
			
		||||
# flake8: noqa
 | 
			
		||||
 | 
			
		||||
import abc
 | 
			
		||||
import sys
 | 
			
		||||
import pathlib
 | 
			
		||||
from contextlib import suppress
 | 
			
		||||
 | 
			
		||||
# zipfile.Path gained the behavior needed here in Python 3.10; fall back
# to the vendored zipp backport on older interpreters.
if sys.version_info >= (3, 10):
    from zipfile import Path as ZipPath  # type: ignore
else:
    from ..zipp import Path as ZipPath  # type: ignore


# typing.runtime_checkable is not available on the oldest supported
# interpreters; degrade to a no-op decorator there.
try:
    from typing import runtime_checkable  # type: ignore
except ImportError:

    def runtime_checkable(cls):  # type: ignore
        return cls


# typing.Protocol is likewise not always available; a plain ABC is the
# closest substitute.
try:
    from typing import Protocol  # type: ignore
except ImportError:
    Protocol = abc.ABC  # type: ignore
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TraversableResourcesLoader:
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.

    Used primarily for Python 3.9 and earlier where the native
    loaders do not yet implement TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    @property
    def path(self):
        # The spec's origin; may be None for some specs (handled by the
        # TypeError guard in _file_reader below).
        return self.spec.origin

    def get_resource_reader(self, name):
        from . import readers, _adapters

        def _zip_reader(spec):
            # AttributeError: loader lacks the .prefix/.archive attributes
            # ZipReader reads, i.e. it is not a zipimport-style loader.
            with suppress(AttributeError):
                return readers.ZipReader(spec.loader, spec.name)

        def _namespace_reader(spec):
            # ValueError: NamespaceReader rejects non-namespace paths.
            with suppress(AttributeError, ValueError):
                return readers.NamespaceReader(spec.submodule_search_locations)

        def _available_reader(spec):
            # AttributeError: loader does not expose get_resource_reader.
            with suppress(AttributeError):
                return spec.loader.get_resource_reader(spec.name)

        def _native_reader(spec):
            # Accept the loader's own reader only when it already
            # provides the files() API.
            reader = _available_reader(spec)
            return reader if hasattr(reader, 'files') else None

        def _file_reader(spec):
            try:
                path = pathlib.Path(self.path)
            except TypeError:
                # self.path was None; not a file-backed module.
                return None
            if path.exists():
                return readers.FileReader(self)

        # Each candidate returns None/falsy when it does not apply; the
        # first match in this priority order wins.
        return (
            # native reader if it supplies 'files'
            _native_reader(self.spec)
            or
            # local ZipReader if a zip module
            _zip_reader(self.spec)
            or
            # local NamespaceReader if a namespace module
            _namespace_reader(self.spec)
            or
            # local FileReader
            _file_reader(self.spec)
            # fallback - adapt the spec ResourceReader to TraversableReader
            or _adapters.CompatibilityFiles(self.spec)
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.

    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
    from above for older Python compatibility (<3.10).
    """
    from . import _adapters

    adapter_cls = _adapters.SpecLoaderAdapter
    return adapter_cls(package.__spec__, TraversableResourcesLoader)
 | 
			
		||||
@@ -0,0 +1,35 @@
 | 
			
		||||
from itertools import filterfalse
 | 
			
		||||
 | 
			
		||||
from typing import (
 | 
			
		||||
    Callable,
 | 
			
		||||
    Iterable,
 | 
			
		||||
    Iterator,
 | 
			
		||||
    Optional,
 | 
			
		||||
    Set,
 | 
			
		||||
    TypeVar,
 | 
			
		||||
    Union,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
# Type and type variable definitions
_T = TypeVar('_T')
_U = TypeVar('_U')


def unique_everseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
) -> Iterator[_T]:
    """List unique elements, preserving order. Remember all elements ever seen.

    Examples:
        unique_everseen('AAAABBBCCDAABBB') --> A B C D
        unique_everseen('ABBCcAD', str.lower) --> A B C D
    """
    seen: Set[Union[_T, _U]] = set()
    mark = seen.add
    if key is None:
        # Fast path: filterfalse skips already-seen elements at C speed.
        for element in filterfalse(seen.__contains__, iterable):
            mark(element)
            yield element
        return
    for element in iterable:
        token = key(element)
        if token not in seen:
            mark(token)
            yield element
 | 
			
		||||
@@ -0,0 +1,121 @@
 | 
			
		||||
import functools
 | 
			
		||||
import os
 | 
			
		||||
import pathlib
 | 
			
		||||
import types
 | 
			
		||||
import warnings
 | 
			
		||||
 | 
			
		||||
from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
 | 
			
		||||
 | 
			
		||||
from . import _common
 | 
			
		||||
 | 
			
		||||
Package = Union[types.ModuleType, str]
 | 
			
		||||
Resource = str
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def deprecated(func):
    """Decorate *func* to emit a DeprecationWarning on every call."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        message = (
            f"{func.__name__} is deprecated. Use files() instead. "
            "Refer to https://importlib-resources.readthedocs.io"
            "/en/latest/using.html#migrating-from-legacy for migration advice."
        )
        # stacklevel=2 points the warning at the caller, not the wrapper.
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    return wrapper
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def normalize_path(path):
    # type: (Any) -> str
    """Normalize a path by ensuring it is a string.

    If the resulting string contains path separators, an exception is raised.
    """
    parent, file_name = os.path.split(str(path))
    if parent:
        raise ValueError(f'{path!r} must be only a file name')
    return file_name
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def open_binary(package: Package, resource: Resource) -> BinaryIO:
    """Return a file-like object opened for binary reading of the resource."""
    target = _common.files(package) / normalize_path(resource)
    return target.open('rb')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def read_binary(package: Package, resource: Resource) -> bytes:
    """Return the binary contents of the resource."""
    target = _common.files(package) / normalize_path(resource)
    return target.read_bytes()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def open_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> TextIO:
    """Return a file-like object opened for text reading of the resource."""
    target = _common.files(package) / normalize_path(resource)
    return target.open('r', encoding=encoding, errors=errors)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def read_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> str:
    """Return the decoded string of the resource.

    The decoding-related arguments have the same semantics as those of
    bytes.decode().
    """
    fp = open_text(package, resource, encoding, errors)
    with fp:
        return fp.read()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def contents(package: Package) -> Iterable[str]:
    """Return an iterable of entries in `package`.

    Note that not all entries are resources.  Specifically, directories are
    not considered resources.  Use `is_resource()` on each entry returned here
    to check if it is a resource or not.
    """
    entries = _common.files(package).iterdir()
    return [entry.name for entry in entries]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def is_resource(package: Package, name: str) -> bool:
    """True if `name` is a resource inside `package`.

    Directories are *not* resources.
    """
    resource = normalize_path(name)
    candidates = _common.files(package).iterdir()
    return any(
        item.name == resource and item.is_file() for item in candidates
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@deprecated
def path(
    package: Package,
    resource: Resource,
) -> ContextManager[pathlib.Path]:
    """A context manager providing a file path object to the resource.

    If the resource does not already exist on its own on the file system,
    a temporary file will be created. If the file was created, the file
    will be deleted upon exiting the context manager (no exception is
    raised if the file was deleted prior to the context manager
    exiting).
    """
    target = _common.files(package) / normalize_path(resource)
    return _common.as_file(target)
 | 
			
		||||
@@ -0,0 +1,137 @@
 | 
			
		||||
import abc
 | 
			
		||||
from typing import BinaryIO, Iterable, Text
 | 
			
		||||
 | 
			
		||||
from ._compat import runtime_checkable, Protocol
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ResourceReader(metaclass=abc.ABCMeta):
    """Abstract base class for loaders to provide resource reading support."""

    @abc.abstractmethod
    def open_resource(self, resource: Text) -> BinaryIO:
        """Return an opened, file-like object for binary reading.

        The 'resource' argument is expected to represent only a file name.
        If the resource cannot be found, FileNotFoundError is raised.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def resource_path(self, resource: Text) -> Text:
        """Return the file system path to the specified resource.

        The 'resource' argument is expected to represent only a file name.
        If the resource does not exist on the file system, raise
        FileNotFoundError.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def is_resource(self, path: Text) -> bool:
        """Return True if the named 'path' is a resource.

        Files are resources, directories are not.
        """
        # Same rationale as above: an accidental call behaves like a
        # missing resource rather than a programming error.
        raise FileNotFoundError

    @abc.abstractmethod
    def contents(self) -> Iterable[str]:
        """Return an iterable of entries in `package`."""
        # Same rationale as above.
        raise FileNotFoundError
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@runtime_checkable
class Traversable(Protocol):
    """
    An object with a subset of pathlib.Path methods suitable for
    traversing directories and opening files.
    """

    @abc.abstractmethod
    def iterdir(self):
        """
        Yield Traversable objects in self
        """

    def read_bytes(self):
        """
        Read contents of self as bytes
        """
        with self.open('rb') as strm:
            return strm.read()

    def read_text(self, encoding=None):
        """
        Read contents of self as text
        """
        # encoding=None defers to open()'s default text encoding.
        with self.open(encoding=encoding) as strm:
            return strm.read()

    @abc.abstractmethod
    def is_dir(self) -> bool:
        """
        Return True if self is a directory
        """

    @abc.abstractmethod
    def is_file(self) -> bool:
        """
        Return True if self is a file
        """

    @abc.abstractmethod
    def joinpath(self, child):
        """
        Return Traversable child in self
        """

    def __truediv__(self, child):
        """
        Return Traversable child in self
        """
        # The `/` operator is sugar for joinpath, mirroring pathlib.
        return self.joinpath(child)

    @abc.abstractmethod
    def open(self, mode='r', *args, **kwargs):
        """
        mode may be 'r' or 'rb' to open as text or binary. Return a handle
        suitable for reading (same as pathlib.Path.open).

        When opening as text, accepts encoding parameters such as those
        accepted by io.TextIOWrapper.
        """

    @abc.abstractproperty
    def name(self) -> str:
        """
        The base name of this object without any parent references.
        """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TraversableResources(ResourceReader):
    """
    The required interface for providing traversable
    resources.
    """

    @abc.abstractmethod
    def files(self):
        """Return a Traversable object for the loaded package."""

    def open_resource(self, resource):
        # Resolve against the package root, then open for binary reading.
        target = self.files().joinpath(resource)
        return target.open('rb')

    def resource_path(self, resource):
        # Traversables are not guaranteed to exist on the real file system.
        raise FileNotFoundError(resource)

    def is_resource(self, path):
        candidate = self.files().joinpath(path)
        return candidate.is_file()

    def contents(self):
        # Lazily yield the names of all direct children.
        for item in self.files().iterdir():
            yield item.name
 | 
			
		||||
@@ -0,0 +1,122 @@
 | 
			
		||||
import collections
 | 
			
		||||
import pathlib
 | 
			
		||||
import operator
 | 
			
		||||
 | 
			
		||||
from . import abc
 | 
			
		||||
 | 
			
		||||
from ._itertools import unique_everseen
 | 
			
		||||
from ._compat import ZipPath
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def remove_duplicates(items):
    """Drop later repeats from *items*, preserving first-seen order."""
    ordered = collections.OrderedDict.fromkeys(items)
    return iter(ordered)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class FileReader(abc.TraversableResources):
    """Traversable resources backed by a plain directory on disk."""

    def __init__(self, loader):
        # Resources live in the directory containing the module file the
        # loader points at.
        self.path = pathlib.Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the real file system path so that `resources.path()`
        does not need to create a temporary copy.
        """
        return str(self.path / resource)

    def files(self):
        return self.path
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ZipReader(abc.TraversableResources):
    """Traversable resources for a module imported from a zip archive.

    Expects a zipimport-style loader exposing `.prefix` and `.archive`.
    """

    def __init__(self, loader, module):
        # Only the final component of a dotted module name contributes to
        # the in-archive directory prefix.
        _, _, name = module.rpartition('.')
        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.archive = loader.archive

    def open_resource(self, resource):
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            # Missing archive members surface as KeyError; translate to
            # the FileNotFoundError promised by the ResourceReader API.
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        # workaround for `zipfile.Path.is_file` returning true
        # for non-existent paths.
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return ZipPath(self.archive, self.prefix)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class MultiplexedPath(abc.Traversable):
    """
    Given a series of Traversable objects, implement a merged
    version of the interface across all objects. Useful for
    namespace packages which may be multihomed at a single
    name.
    """

    def __init__(self, *paths):
        # De-duplicate while preserving order; the first path acts as the
        # primary home (see joinpath and name below).
        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
        if not self._paths:
            message = 'MultiplexedPath must contain at least one path'
            raise FileNotFoundError(message)
        if not all(path.is_dir() for path in self._paths):
            raise NotADirectoryError('MultiplexedPath only supports directories')

    def iterdir(self):
        # Merge children of all homes; on a name collision the entry from
        # the earlier path wins (unique_everseen keeps the first seen).
        files = (file for path in self._paths for file in path.iterdir())
        return unique_everseen(files, key=operator.attrgetter('name'))

    def read_bytes(self):
        raise FileNotFoundError(f'{self} is not a file')

    def read_text(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    def is_dir(self):
        # Always a directory: __init__ rejects non-directory paths.
        return True

    def is_file(self):
        return False

    def joinpath(self, child):
        # first try to find child in current paths
        for file in self.iterdir():
            if file.name == child:
                return file
        # if it does not exist, construct it with the first path
        return self._paths[0] / child

    __truediv__ = joinpath

    def open(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    @property
    def name(self):
        # All homes share the same logical name; use the first.
        return self._paths[0].name

    def __repr__(self):
        paths = ', '.join(f"'{path}'" for path in self._paths)
        return f'MultiplexedPath({paths})'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class NamespaceReader(abc.TraversableResources):
    """TraversableResources for a namespace package's multiple path entries."""

    def __init__(self, namespace_path):
        # Guard: accept only a namespace path object (its repr/str names
        # the _NamespacePath type).
        if 'NamespacePath' not in str(namespace_path):
            raise ValueError('Invalid path')
        self.path = MultiplexedPath(*list(namespace_path))

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        """Return the combined Traversable spanning all namespace entries."""
        return self.path
 | 
			
		||||
@@ -0,0 +1,116 @@
 | 
			
		||||
"""
 | 
			
		||||
Interface adapters for low-level readers.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import abc
 | 
			
		||||
import io
 | 
			
		||||
import itertools
 | 
			
		||||
from typing import BinaryIO, List
 | 
			
		||||
 | 
			
		||||
from .abc import Traversable, TraversableResources
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SimpleReader(abc.ABC):
    """
    The minimum, low-level interface required from a resource
    provider.
    """

    # `abc.abstractproperty` is deprecated since Python 3.3; the
    # documented replacement is stacking @property over @abc.abstractmethod.
    @property
    @abc.abstractmethod
    def package(self):
        # type: () -> str
        """
        The name of the package for which this reader loads resources.
        """

    @abc.abstractmethod
    def children(self):
        # type: () -> List['SimpleReader']
        """
        Obtain an iterable of SimpleReader for available
        child containers (e.g. directories).
        """

    @abc.abstractmethod
    def resources(self):
        # type: () -> List[str]
        """
        Obtain available named resources for this virtual package.
        """

    @abc.abstractmethod
    def open_binary(self, resource):
        # type: (str) -> BinaryIO
        """
        Obtain a File-like for a named resource.
        """

    @property
    def name(self):
        # The last dotted segment of the package name, e.g. 'mod'
        # for package 'pkg.sub.mod'.
        return self.package.split('.')[-1]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ResourceHandle(Traversable):
    """
    Handle to a named resource in a ResourceReader.
    """

    def __init__(self, parent, name):
        # type: (ResourceContainer, str) -> None
        self.parent = parent
        self.name = name  # type: ignore

    def is_file(self):
        return True

    def is_dir(self):
        return False

    def open(self, mode='r', *args, **kwargs):
        """
        Open the resource; binary by default via the reader, wrapped in a
        text layer unless 'b' is in *mode*.
        """
        stream = self.parent.reader.open_binary(self.name)
        if 'b' not in mode:
            # BUG FIX: the binary stream must be passed to TextIOWrapper;
            # previously it was dropped, raising TypeError on text opens.
            stream = io.TextIOWrapper(stream, *args, **kwargs)
        return stream

    def joinpath(self, name):
        # A resource is a leaf; it has no children to traverse into.
        raise RuntimeError("Cannot traverse into a resource")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ResourceContainer(Traversable):
    """
    Traversable container for a package's resources via its reader.
    """

    def __init__(self, reader):
        # type: (SimpleReader) -> None
        self.reader = reader

    def is_dir(self):
        # A container always models a directory.
        return True

    def is_file(self):
        # Never a regular file; file entries are ResourceHandle objects.
        return False

    def iterdir(self):
        # Yield file handles for each named resource, then containers for
        # each child reader.
        # NOTE(review): `resources` is accessed without calling while
        # `children()` is called — presumably concrete readers expose
        # `resources` as an attribute/property; confirm against subclasses.
        files = (ResourceHandle(self, name) for name in self.reader.resources)
        dirs = map(ResourceContainer, self.reader.children())
        return itertools.chain(files, dirs)

    def open(self, *args, **kwargs):
        # Directories cannot be opened as file streams.
        raise IsADirectoryError()

    def joinpath(self, name):
        # Return the first child matching `name`; raises StopIteration
        # (from next()) when no such child exists.
        return next(
            traversable for traversable in self.iterdir() if traversable.name == name
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TraversableReader(TraversableResources, SimpleReader):
    """
    A TraversableResources based on SimpleReader. Resource providers
    may derive from this class to provide the TraversableResources
    interface by supplying the SimpleReader interface.
    """

    def files(self):
        # Expose this reader itself as a traversable directory.
        return ResourceContainer(self)
 | 
			
		||||
@@ -0,0 +1,253 @@
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
import contextlib
 | 
			
		||||
import functools
 | 
			
		||||
import tempfile
 | 
			
		||||
import shutil
 | 
			
		||||
import operator
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def pushd(dir):
    """
    Temporarily change the working directory to *dir*, restoring the
    original directory on exit (even on error).
    """
    original = os.getcwd()
    os.chdir(dir)
    try:
        yield dir
    finally:
        os.chdir(original)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
    """
    Get a tarball, extract it, change to that directory, yield, then
    clean up.
    `runner` is the function to invoke commands.
    `pushd` is a context manager for changing the directory.
    """
    if target_dir is None:
        # Derive the directory name from the tarball's basename.
        target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
    if runner is None:
        # Default: run each command through the shell.
        runner = functools.partial(subprocess.check_call, shell=True)
    # In the tar command, use --strip-components=1 to strip the first path and
    #  then
    #  use -C to cause the files to be extracted to {target_dir}. This ensures
    #  that we always know where the files were extracted.
    runner('mkdir {target_dir}'.format(**vars()))
    try:
        getter = 'wget {url} -O -'
        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
        cmd = ' | '.join((getter, extract))
        # .format(**vars()) interpolates the locals (url, target_dir) plus
        # the inferred compression flag into the shell pipeline.
        runner(cmd.format(compression=infer_compression(url), **vars()))
        with pushd(target_dir):
            yield target_dir
    finally:
        # Best-effort cleanup of the extraction directory.
        runner('rm -Rf {target_dir}'.format(**vars()))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def infer_compression(url):
    """
    Given a URL or filename, infer the compression code for tar.

    >>> infer_compression('proj.tar.xz')
    'J'
    """
    # Cheat: the last two characters identify the scheme.
    tail = url[-2:]
    flags = {'gz': 'z', 'bz': 'j', 'xz': 'J'}
    # Assume 'z' (gzip) when nothing matches.
    return flags.get(tail, 'z')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def temp_dir(remover=shutil.rmtree):
    """
    Create a temporary directory context. Pass a custom remover
    to override the removal behavior.
    """
    # Local renamed so it doesn't shadow the function name.
    created = tempfile.mkdtemp()
    try:
        yield created
    finally:
        remover(created)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
    """
    Check out the repo indicated by url.

    If dest_ctx is supplied, it should be a context manager
    to yield the target directory for the check out.
    """
    # Crude VCS detection: any URL mentioning 'git' is cloned with git,
    # everything else with mercurial.
    exe = 'git' if 'git' in url else 'hg'
    with dest_ctx() as repo_dir:
        cmd = [exe, 'clone', url, repo_dir]
        if branch:
            cmd.extend(['--branch', branch])
        # BUG FIX: the previous implementation opened os.devnull by hand
        # and never closed it, leaking a file handle per call. Use
        # subprocess.DEVNULL, which requires no cleanup.
        stdout = subprocess.DEVNULL if quiet else None
        subprocess.check_call(cmd, stdout=stdout)
        yield repo_dir
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def null():
    """A no-op context manager yielding None."""
    yield
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ExceptionTrap:
    """
    A context manager that will catch certain exceptions and provide an
    indication they occurred.

    >>> with ExceptionTrap() as trap:
    ...     raise Exception()
    >>> bool(trap)
    True

    Exceptions outside the trapped set propagate:

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise Exception()
    Traceback (most recent call last):
    ...
    Exception
    >>> bool(trap)
    False
    """

    # (type, value, traceback) of the trapped exception, if any.
    exc_info = None, None, None

    def __init__(self, exceptions=(Exception,)):
        self.exceptions = exceptions

    def __enter__(self):
        return self

    @property
    def type(self):
        """Type of the trapped exception, or None."""
        return self.exc_info[0]

    @property
    def value(self):
        """The trapped exception instance, or None."""
        return self.exc_info[1]

    @property
    def tb(self):
        """Traceback of the trapped exception, or None."""
        return self.exc_info[2]

    def __exit__(self, *exc_info):
        exc_type = exc_info[0]
        # Suppress (and record) only the configured exception types.
        matched = exc_type is not None and issubclass(exc_type, self.exceptions)
        if matched:
            self.exc_info = exc_info
        return matched

    def __bool__(self):
        return bool(self.type)

    def raises(self, func, *, _test=bool):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if an exception occurred).

        >>> raises = ExceptionTrap(ValueError).raises
        >>> @raises
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        True
        """

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with ExceptionTrap(self.exceptions) as trap:
                func(*args, **kwargs)
            return _test(trap)

        return wrapper

    def passes(self, func):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if no exception).

        >>> passes = ExceptionTrap(ValueError).passes
        >>> @passes
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        False
        """
        # Invert the `raises` test.
        return self.raises(func, _test=operator.not_)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class suppress(contextlib.suppress, contextlib.ContextDecorator):
    """
    A version of contextlib.suppress with decorator support.

    >>> @suppress(KeyError)
    ... def key_error():
    ...     {}['']
    >>> key_error()
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class on_interrupt(contextlib.ContextDecorator):
    """
    Replace a KeyboardInterrupt with SystemExit(1)

    >>> def do_interrupt():
    ...     raise KeyboardInterrupt()
    >>> on_interrupt('error')(do_interrupt)()
    Traceback (most recent call last):
    ...
    SystemExit: 1
    >>> on_interrupt('suppress')(do_interrupt)()
    >>> with __import__('pytest').raises(KeyboardInterrupt):
    ...     on_interrupt('ignore')(do_interrupt)()
    """

    def __init__(
        self,
        action='error',
        # py3.7 compat
        # /,
        code=1,
    ):
        self.action = action
        self.code = code

    def __enter__(self):
        return self

    def __exit__(self, exctype, excinst, exctb):
        # Anything other than a KeyboardInterrupt propagates untouched,
        # as does an interrupt when the action is 'ignore'.
        if exctype is not KeyboardInterrupt:
            return None
        if self.action == 'ignore':
            return None
        if self.action == 'error':
            raise SystemExit(self.code) from excinst
        # 'suppress' swallows the interrupt; any other action propagates.
        return self.action == 'suppress'
 | 
			
		||||
@@ -0,0 +1,525 @@
 | 
			
		||||
import functools
 | 
			
		||||
import time
 | 
			
		||||
import inspect
 | 
			
		||||
import collections
 | 
			
		||||
import types
 | 
			
		||||
import itertools
 | 
			
		||||
 | 
			
		||||
import setuptools.extern.more_itertools
 | 
			
		||||
 | 
			
		||||
from typing import Callable, TypeVar
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
CallableT = TypeVar("CallableT", bound=Callable[..., object])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def compose(*funcs):
    """
    Compose any number of unary functions into a single unary function.
    The rightmost function may take arbitrary arguments; each function
    to its left is applied to the previous result.

    >>> round_three = lambda x: round(x, ndigits=3)
    >>> f = compose(round_three, int.__truediv__)
    >>> [f(3*x, x+1) for x in range(1,10)]
    [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
    """

    def pair(outer, inner):
        def composed(*args, **kwargs):
            return outer(inner(*args, **kwargs))

        return composed

    return functools.reduce(pair, funcs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def method_caller(method_name, *args, **kwargs):
    """
    Return a function that will call a named method on the
    target object with optional positional and keyword
    arguments.

    >>> lower = method_caller('lower')
    >>> lower('MyString')
    'mystring'
    """

    def call_method(target):
        # Resolve the method on the target at call time and invoke it.
        return getattr(target, method_name)(*args, **kwargs)

    return call_method
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def once(func):
    """
    Decorate func so it's only ever called the first time.

    Subsequent calls return the first result regardless of arguments.
    Reset by deleting ``saved_result`` or invoking ``reset()``.

    >>> add_three = once(lambda a: a + 3)
    >>> add_three(3)
    6
    >>> add_three(9)
    6
    >>> add_three.reset()
    >>> add_three(-3)
    0
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # EAFP: compute and store only if no result has been saved yet.
        try:
            return wrapper.saved_result
        except AttributeError:
            wrapper.saved_result = func(*args, **kwargs)
        return wrapper.saved_result

    wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
    return wrapper
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def method_cache(
    method: CallableT,
    cache_wrapper: Callable[
        [CallableT], CallableT
    ] = functools.lru_cache(),  # type: ignore[assignment]
) -> CallableT:
    """
    Wrap lru_cache to support storing the cache data in the object instances.

    On the first invocation for a given instance, the method is bound to
    that instance, wrapped with ``cache_wrapper``, and installed as an
    instance attribute shadowing this wrapper — so each instance carries
    its own cache, freed together with the instance.

    ``.cache_clear()`` is supported even before the first call.

    Caution - do not subsequently wrap the method with another decorator, such
    as ``@property``, which changes the semantics of the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """

    def wrapper(self: object, *args: object, **kwargs: object) -> object:
        # First call: swap in a cached, bound method on the instance.
        bound: CallableT = types.MethodType(  # type: ignore[assignment]
            method, self
        )
        cached = cache_wrapper(bound)
        setattr(self, method.__name__, cached)
        return cached(*args, **kwargs)

    # Support cache clear even before cache has been created.
    wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]

    # Special methods are looked up on the type, not the instance,
    # so they need a proxy instead.
    return (  # type: ignore[return-value]
        _special_method_cache(method, cache_wrapper) or wrapper
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _special_method_cache(method, cache_wrapper):
 | 
			
		||||
    """
 | 
			
		||||
    Because Python treats special methods differently, it's not
 | 
			
		||||
    possible to use instance attributes to implement the cached
 | 
			
		||||
    methods.
 | 
			
		||||
 | 
			
		||||
    Instead, install the wrapper method under a different name
 | 
			
		||||
    and return a simple proxy to that wrapper.
 | 
			
		||||
 | 
			
		||||
    https://github.com/jaraco/jaraco.functools/issues/5
 | 
			
		||||
    """
 | 
			
		||||
    name = method.__name__
 | 
			
		||||
    special_names = '__getattr__', '__getitem__'
 | 
			
		||||
    if name not in special_names:
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    wrapper_name = '__cached' + name
 | 
			
		||||
 | 
			
		||||
    def proxy(self, *args, **kwargs):
 | 
			
		||||
        if wrapper_name not in vars(self):
 | 
			
		||||
            bound = types.MethodType(method, self)
 | 
			
		||||
            cache = cache_wrapper(bound)
 | 
			
		||||
            setattr(self, wrapper_name, cache)
 | 
			
		||||
        else:
 | 
			
		||||
            cache = getattr(self, wrapper_name)
 | 
			
		||||
        return cache(*args, **kwargs)
 | 
			
		||||
 | 
			
		||||
    return proxy
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def apply(transform):
    """
    Decorate a function with a transform function that is
    invoked on results returned from the decorated function.

    >>> @apply(list)
    ... def get_numbers(start):
    ...     "doc for get_numbers"
    ...     return range(start, start + 3)
    >>> get_numbers(4)
    [4, 5, 6]
    >>> get_numbers.__doc__
    'doc for get_numbers'
    """

    def wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Equivalent to compose(transform, func), written inline.
            return transform(func(*args, **kwargs))

        return wrapper

    return wrap
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def result_invoke(action):
 | 
			
		||||
    r"""
 | 
			
		||||
    Decorate a function with an action function that is
 | 
			
		||||
    invoked on the results returned from the decorated
 | 
			
		||||
    function (for its side-effect), then return the original
 | 
			
		||||
    result.
 | 
			
		||||
 | 
			
		||||
    >>> @result_invoke(print)
 | 
			
		||||
    ... def add_two(a, b):
 | 
			
		||||
    ...     return a + b
 | 
			
		||||
    >>> x = add_two(2, 3)
 | 
			
		||||
    5
 | 
			
		||||
    >>> x
 | 
			
		||||
    5
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    def wrap(func):
 | 
			
		||||
        @functools.wraps(func)
 | 
			
		||||
        def wrapper(*args, **kwargs):
 | 
			
		||||
            result = func(*args, **kwargs)
 | 
			
		||||
            action(result)
 | 
			
		||||
            return result
 | 
			
		||||
 | 
			
		||||
        return wrapper
 | 
			
		||||
 | 
			
		||||
    return wrap
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def call_aside(f, *args, **kwargs):
    """
    Call a function for its side effect after initialization,
    returning the function unchanged (usable as a decorator).

    >>> @call_aside
    ... def func(): print("called")
    called
    >>> func()
    called
    """
    f(*args, **kwargs)
    return f
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Throttler:
    """
    Rate-limit a function (or other callable)
    """

    def __init__(self, func, max_rate=float('Inf')):
        # Unwrap an already-throttled callable so wrapping is idempotent.
        if isinstance(func, Throttler):
            func = func.func
        self.func = func
        self.max_rate = max_rate
        self.reset()

    def reset(self):
        # Pretend the last call was at the epoch so the first call
        # never waits.
        self.last_called = 0

    def __call__(self, *args, **kwargs):
        self._wait()
        return self.func(*args, **kwargs)

    def _wait(self):
        """Block until at least 1/max_rate seconds since the last call."""
        since_last = time.time() - self.last_called
        remaining = 1 / self.max_rate - since_last
        time.sleep(max(0, remaining))
        self.last_called = time.time()

    def __get__(self, obj, type=None):
        # Support decorating methods: bind to obj, throttling first.
        return first_invoke(self._wait, functools.partial(self.func, obj))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def first_invoke(func1, func2):
    """
    Return a function that when invoked will invoke func1 without
    any parameters (for its side-effect) and then invoke func2
    with whatever parameters were passed, returning its result.
    """

    def combined(*args, **kwargs):
        func1()
        return func2(*args, **kwargs)

    return combined
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
    """
    Given a callable func, trap the indicated exceptions
    for up to 'retries' times, invoking cleanup on the
    exception. On the final attempt, allow any exceptions
    to propagate.
    """
    if retries == float('inf'):
        attempts = itertools.count()
    else:
        attempts = range(retries)
    for _ in attempts:
        try:
            return func()
        except trap:
            cleanup()

    # Final attempt: exceptions propagate to the caller.
    return func()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def retry(*r_args, **r_kwargs):
    """
    Decorator wrapper for retry_call. Accepts arguments to retry_call
    except func and then returns a decorator for the decorated function.

    >>> @retry(retries=3)
    ... def my_func(a, b):
    ...     "this is my funk"
    ...     print(a, b)
    >>> my_func.__doc__
    'this is my funk'
    """

    def decorate(func):
        @functools.wraps(func)
        def retrying(*f_args, **f_kwargs):
            # Freeze the call's arguments, then delegate to retry_call.
            bound = functools.partial(func, *f_args, **f_kwargs)
            return retry_call(bound, *r_args, **r_kwargs)

        return retrying

    return decorate
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def print_yielded(func):
    """
    Convert a generator into a function that prints all yielded elements

    >>> @print_yielded
    ... def x():
    ...     yield 3; yield None
    >>> x()
    3
    None
    """
    print_each = functools.partial(map, print)
    # Drain the mapped generator so every element is actually printed.
    drain = compose(more_itertools.consume, print_each, func)
    return functools.wraps(func)(drain)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def wrapper(param, *args, **kwargs):
        # Short-circuit: a None first argument yields None without calling.
        if param is None:
            return None
        return func(param, *args, **kwargs)

    return wrapper
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def assign_params(func, namespace):
    """
    Assign parameters from namespace where func solicits.

    Only the keys of *namespace* that match a parameter name of *func*
    are bound; extras are silently ignored.

    >>> def func(x, y=3):
    ...     print(x, y)
    >>> assigned = assign_params(func, dict(x=2, z=4))
    >>> assigned()
    2 3

    The usual errors are raised if a function doesn't receive
    its required parameters:

    >>> assigned = assign_params(func, dict(y=3, z=4))
    >>> assigned()
    Traceback (most recent call last):
    TypeError: func() ...argument...

    It even works on methods:

    >>> class Handler:
    ...     def meth(self, arg):
    ...         print(arg)
    >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
    crystal
    """
    wanted = inspect.signature(func).parameters.keys()
    selected = {name: value for name, value in namespace.items() if name in wanted}
    return functools.partial(func, **selected)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def save_method_args(method):
    """
    Wrap a method such that when it is called, the args and kwargs are
    saved on the method.

    The most recent call is recorded on the *instance* as
    ``_saved_<method name>``, a namedtuple with ``args`` and ``kwargs``
    fields, so different instances save different args.

    >>> class MyClass:
    ...     @save_method_args
    ...     def method(self, a, b):
    ...         print(a, b)
    >>> my_ob = MyClass()
    >>> my_ob.method(1, 2)
    1 2
    >>> my_ob._saved_method.args
    (1, 2)
    >>> my_ob._saved_method.kwargs
    {}
    >>> my_ob.method(a=3, b='foo')
    3 foo
    >>> my_ob._saved_method.args
    ()
    >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
    True
    """
    args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')

    @functools.wraps(method)
    def recorder(self, *args, **kwargs):
        # Snapshot the call before delegating, so the record exists even
        # if the method raises after inspecting its arguments.
        record = args_and_kwargs(args, kwargs)
        setattr(self, '_saved_' + method.__name__, record)
        return method(self, *args, **kwargs)

    return recorder
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def except_(*exceptions, replace=None, use=None):
    """
    Replace the indicated exceptions, if raised, with the indicated
    literal replacement or evaluated expression (if present).

    >>> safe_int = except_(ValueError)(int)
    >>> safe_int('five')
    >>> safe_int('5')
    5

    Specify a literal replacement with ``replace``.

    >>> safe_int_r = except_(ValueError, replace=0)(int)
    >>> safe_int_r('five')
    0

    Provide an expression to ``use`` to pass through particular parameters.

    >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
    >>> safe_int_pt('five')
    'five'

    """

    def apply(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except exceptions:
                # NOTE: ``use`` is evaluated with ``args``/``kwargs`` in
                # scope. It is a caller-supplied expression, not untrusted
                # input; when ``use`` is None, eval raises TypeError and
                # the literal ``replace`` is returned instead.
                try:
                    return eval(use)
                except TypeError:
                    return replace

        return wrapper

    return apply
 | 
			
		||||
@@ -0,0 +1,599 @@
 | 
			
		||||
import re
 | 
			
		||||
import itertools
 | 
			
		||||
import textwrap
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    from importlib.resources import files  # type: ignore
 | 
			
		||||
except ImportError:  # pragma: nocover
 | 
			
		||||
    from setuptools.extern.importlib_resources import files  # type: ignore
 | 
			
		||||
 | 
			
		||||
from setuptools.extern.jaraco.functools import compose, method_cache
 | 
			
		||||
from setuptools.extern.jaraco.context import ExceptionTrap
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def substitution(old, new):
    """
    Return a function that will perform a substitution on a string,
    replacing every occurrence of *old* with *new*.
    """

    def replace_in(text):
        return text.replace(old, new)

    return replace_in
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def multi_substitution(*substitutions):
    """
    Take a sequence of pairs specifying substitutions, and create
    a function that performs those substitutions, applying each
    pair in the order given.

    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
    'baz'
    """
    # Materialize up front so the returned function can be called
    # more than once even if a generator was supplied.
    pairs = tuple(substitutions)

    def apply_all(text):
        for old, new in pairs:
            text = text.replace(old, new)
        return text

    return apply_all
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class FoldedCase(str):
    """
    A case insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')

    >>> s == 'Hello World'
    True

    >>> 'Hello World' == s
    True

    >>> s != 'Hello World'
    False

    >>> s.index('O')
    4

    >>> s.split('O')
    ['hell', ' w', 'rld']

    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Sequence membership is straightforward.

    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True

    You may test for set inclusion, but candidate and elements
    must both be folded.

    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left:

    >>> FoldedCase('hello') in 'Hello World'
    False

    In that case, use ``in_``:

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    # NOTE: only ordering, equality, hashing and the search helpers below
    # are overridden; other str methods (e.g. ``replace``) remain case
    # sensitive.

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        # Hash on the folded form so equal-ignoring-case values collide,
        # keeping __eq__ and __hash__ consistent for dict/set use.
        return hash(self.lower())

    def __contains__(self, other):
        # super().lower() bypasses the cached override below and returns
        # a plain lowercased str for the substring test.
        return super().lower().__contains__(other.lower())

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        # Case-insensitive variant of str.index.
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        # Case-insensitive split via a regex with the IGNORECASE flag.
        pattern = re.compile(re.escape(splitter), re.I)
        return pattern.split(self, maxsplit)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Python 3.8 compatibility
_unicode_trap = ExceptionTrap(UnicodeDecodeError)


@_unicode_trap.passes
def is_decodable(value):
    r"""
    Return True if the supplied value is decodable (using the default
    encoding).

    >>> is_decodable(b'\xff')
    False
    >>> is_decodable(b'\x32')
    True
    """
    # Decoding raises UnicodeDecodeError on failure; the ``passes``
    # decorator above converts "no exception" into True and a trapped
    # UnicodeDecodeError into False, so no explicit return is needed.
    value.decode()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def is_binary(value):
    r"""
    Return True if the value appears to be binary (that is, it's a byte
    string and isn't decodable).

    >>> is_binary(b'\xff')
    True
    >>> is_binary('\xff')
    False
    """
    # Non-bytes values (including str) are never considered binary.
    if not isinstance(value, bytes):
        return False
    return not is_decodable(value)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def trim(s):
    r"""
    Trim something like a docstring to remove the whitespace that
    is common due to indentation and formatting.

    >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
    'foo = bar\n\tbar = baz'
    """
    dedented = textwrap.dedent(s)
    return dedented.strip()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def wrap(s):
    """
    Wrap lines of text, retaining existing newlines as
    paragraph markers.

    Each input line is treated as one paragraph, rewrapped at the
    default ``textwrap`` width, and the paragraphs are re-joined
    with blank lines between them.
    """
    rewrapped = [
        '\n'.join(textwrap.wrap(paragraph))
        for paragraph in s.splitlines()
    ]
    return '\n\n'.join(rewrapped)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def unwrap(s):
    r"""
    Given a multi-line string, return an unwrapped version.

    Blank-line-separated paragraphs are each collapsed to a single
    line; the resulting paragraphs are joined with single newlines.
    """
    paragraphs = re.split(r'\n\n+', s)
    flattened = [paragraph.replace('\n', ' ') for paragraph in paragraphs]
    return '\n'.join(flattened)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Splitter(object):
    """object that will split a string with the given arguments for each call

    >>> s = Splitter(',')
    >>> s('hello, world, this is your, master calling')
    ['hello', ' world', ' this is your', ' master calling']
    """

    def __init__(self, *args):
        # Positional arguments are stored and forwarded verbatim to
        # ``str.split`` on each call (e.g. separator and maxsplit).
        self.args = args

    def __call__(self, s):
        return s.split(*self.args)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def indent(string, prefix=' ' * 4):
    """
    Prepend *prefix* (four spaces by default) to *string*.

    >>> indent('foo')
    '    foo'
    """
    return ''.join((prefix, string))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class WordSet(tuple):
    """
    Given an identifier, return the words that identifier represents,
    whether in camel case, underscore-separated, etc.

    >>> WordSet.parse("camelCase")
    ('camel', 'Case')

    >>> WordSet.parse("under_sep")
    ('under', 'sep')

    Acronyms should be retained

    >>> WordSet.parse("firstSNL")
    ('first', 'SNL')

    >>> WordSet.parse("you_and_I")
    ('you', 'and', 'I')

    >>> WordSet.parse("A simple test")
    ('A', 'simple', 'test')

    Multiple caps should not interfere with the first cap of another word.

    >>> WordSet.parse("myABCClass")
    ('my', 'ABC', 'Class')

    The result is a WordSet, so you can get the form you need.

    >>> WordSet.parse("myABCClass").underscore_separated()
    'my_ABC_Class'

    >>> WordSet.parse('a-command').camel_case()
    'ACommand'

    >>> WordSet.parse('someIdentifier').lowered().space_separated()
    'some identifier'

    Slices of the result should return another WordSet.

    >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
    'out_of_context'

    >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
    'word set'

    >>> example = WordSet.parse('figured it out')
    >>> example.headless_camel_case()
    'figuredItOut'
    >>> example.dash_separated()
    'figured-it-out'

    """

    # A word is a capitalized or lowercase run of letters, or an acronym:
    # a run of capitals not followed by a lowercase letter.
    _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')

    def capitalized(self):
        return WordSet(map(str.capitalize, self))

    def lowered(self):
        return WordSet(map(str.lower, self))

    def camel_case(self):
        return ''.join(self.capitalized())

    def headless_camel_case(self):
        # Lowercase only the first word; camel-case the remainder.
        words = iter(self)
        first = next(words).lower()
        return first + WordSet(words).camel_case()

    def underscore_separated(self):
        return '_'.join(self)

    def dash_separated(self):
        return '-'.join(self)

    def space_separated(self):
        return ' '.join(self)

    def trim_right(self, item):
        """
        Remove the item from the end of the set.

        >>> WordSet.parse('foo bar').trim_right('foo')
        ('foo', 'bar')
        >>> WordSet.parse('foo bar').trim_right('bar')
        ('foo',)
        >>> WordSet.parse('').trim_right('bar')
        ()
        """
        if self and self[-1] == item:
            return self[:-1]
        return self

    def trim_left(self, item):
        """
        Remove the item from the beginning of the set.

        >>> WordSet.parse('foo bar').trim_left('foo')
        ('bar',)
        >>> WordSet.parse('foo bar').trim_left('bar')
        ('foo', 'bar')
        >>> WordSet.parse('').trim_left('bar')
        ()
        """
        if self and self[0] == item:
            return self[1:]
        return self

    def trim(self, item):
        """
        >>> WordSet.parse('foo bar').trim('foo')
        ('bar',)
        """
        return self.trim_left(item).trim_right(item)

    def __getitem__(self, item):
        # Slicing re-wraps the result so chained word-form methods work.
        result = super().__getitem__(item)
        if isinstance(item, slice):
            result = WordSet(result)
        return result

    @classmethod
    def parse(cls, identifier):
        return WordSet(
            match.group(0) for match in cls._pattern.finditer(identifier)
        )

    @classmethod
    def from_class_name(cls, subject):
        return cls.parse(subject.__class__.__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# for backward compatibility
 | 
			
		||||
words = WordSet.parse
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def simple_html_strip(s):
    r"""
    Remove HTML from the string `s`.

    >>> str(simple_html_strip(''))
    ''

    >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
    A stormy day in paradise

    >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
    Somebody  tell the truth.

    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
    What about
    multiple lines?
    """
    # Group 1 matches comments, group 2 tags, group 3 plain text; only
    # the text group is retained.
    html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
    pieces = [match.group(3) or '' for match in html_stripper.finditer(s)]
    return ''.join(pieces)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SeparatedValues(str):
    """
    A string separated by a separator. Overrides __iter__ for getting
    the values.

    >>> list(SeparatedValues('a,b,c'))
    ['a', 'b', 'c']

    Whitespace is stripped and empty values are discarded.

    >>> list(SeparatedValues(' a,   b   , c,  '))
    ['a', 'b', 'c']
    """

    separator = ','

    def __iter__(self):
        for part in self.split(self.separator):
            stripped = part.strip()
            # Empty values (e.g. from trailing separators) are skipped.
            if stripped:
                yield stripped
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Stripper:
    r"""
    Given a series of lines, find the common prefix and strip it from them.

    >>> lines = [
    ...     'abcdefg\n',
    ...     'abc\n',
    ...     'abcde\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    'abc'
    >>> list(res.lines)
    ['defg\n', '\n', 'de\n']

    If no prefix is common, nothing should be stripped.

    >>> lines = [
    ...     'abcd\n',
    ...     '1234\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix = ''
    >>> list(res.lines)
    ['abcd\n', '1234\n']
    """

    def __init__(self, prefix, lines):
        self.prefix = prefix
        # Strip lazily: each line is processed as ``self.lines`` is consumed.
        self.lines = map(self, lines)

    @classmethod
    def strip_prefix(cls, lines):
        # Tee so one copy feeds the prefix computation and the other is
        # left for stripping.
        prefix_lines, lines = itertools.tee(lines)
        prefix = functools.reduce(cls.common_prefix, prefix_lines)
        return cls(prefix, lines)

    def __call__(self, line):
        if not self.prefix:
            return line
        _before, _match, stripped = line.partition(self.prefix)
        return stripped

    @staticmethod
    def common_prefix(s1, s2):
        """
        Return the common prefix of two lines.
        """
        # Shrink the candidate prefix until both strings agree on it;
        # at length 0 both slices are '' and the loop terminates.
        limit = min(len(s1), len(s2))
        while s1[:limit] != s2[:limit]:
            limit -= 1
        return s1[:limit]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def remove_prefix(text, prefix):
    """
    Remove the prefix from the text if it exists.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'

    >>> remove_prefix('something special', 'sample')
    'something special'
    """
    # rpartition yields the original text in the final slot when the
    # prefix is absent, so no explicit membership check is needed.
    _before, _found, after = text.rpartition(prefix)
    return after
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def remove_suffix(text, suffix):
    """
    Remove the suffix from the text if it exists.

    >>> remove_suffix('name.git', '.git')
    'name'

    >>> remove_suffix('something special', 'sample')
    'something special'
    """
    # partition yields the original text in the first slot when the
    # suffix is absent, so no explicit membership check is needed.
    before, _found, _after = text.partition(suffix)
    return before
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def normalize_newlines(text):
    r"""
    Replace alternate newlines with the canonical newline.

    >>> normalize_newlines('Lorem Ipsum\u2029')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\r\n')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\x85')
    'Lorem Ipsum\n'
    """
    # \r\n must come first in the alternation so it is consumed as a
    # single unit rather than producing two replacement newlines.
    pattern = re.compile('\r\n|\r|\n|\u0085|\u2028|\u2029')
    return pattern.sub('\n', text)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _nonblank(str):
 | 
			
		||||
    return str and not str.startswith('#')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.singledispatch
 | 
			
		||||
def yield_lines(iterable):
 | 
			
		||||
    r"""
 | 
			
		||||
    Yield valid lines of a string or iterable.
 | 
			
		||||
 | 
			
		||||
    >>> list(yield_lines(''))
 | 
			
		||||
    []
 | 
			
		||||
    >>> list(yield_lines(['foo', 'bar']))
 | 
			
		||||
    ['foo', 'bar']
 | 
			
		||||
    >>> list(yield_lines('foo\nbar'))
 | 
			
		||||
    ['foo', 'bar']
 | 
			
		||||
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
 | 
			
		||||
    ['foo', 'baz #comment']
 | 
			
		||||
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
 | 
			
		||||
    ['foo', 'bar', 'baz', 'bing']
 | 
			
		||||
    """
 | 
			
		||||
    return itertools.chain.from_iterable(map(yield_lines, iterable))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@yield_lines.register(str)
 | 
			
		||||
def _(text):
 | 
			
		||||
    return filter(_nonblank, map(str.strip, text.splitlines()))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    # Only a space-then-hash starts a comment; a bare '#' is kept.
    content, _sep, _comment = line.partition(' #')
    return content
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Not sure why, but...
    The character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    A terrible idea, but...
    If no line is available to continue, suppress the lines.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    stream = iter(lines)
    for current in stream:
        # Keep absorbing the following line while this one is continued.
        while current.endswith('\\'):
            try:
                continuation = next(stream)
            except StopIteration:
                # Dangling continuation at end of input: suppress the line.
                return
            # Drops the backslash AND the character before it, then strips.
            current = current[:-2].strip() + continuation
        yield current
 | 
			
		||||
@@ -0,0 +1,4 @@
 | 
			
		||||
from .more import *  # noqa
 | 
			
		||||
from .recipes import *  # noqa
 | 
			
		||||
 | 
			
		||||
__version__ = '8.8.0'
 | 
			
		||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							@@ -0,0 +1,620 @@
 | 
			
		||||
"""Imported from the recipes section of the itertools documentation.
 | 
			
		||||
 | 
			
		||||
All functions taken from the recipes section of the itertools library docs
 | 
			
		||||
[1]_.
 | 
			
		||||
Some backward-compatible usability improvements have been made.
 | 
			
		||||
 | 
			
		||||
.. [1] http://docs.python.org/library/itertools.html#recipes
 | 
			
		||||
 | 
			
		||||
"""
 | 
			
		||||
import warnings
 | 
			
		||||
from collections import deque
 | 
			
		||||
from itertools import (
 | 
			
		||||
    chain,
 | 
			
		||||
    combinations,
 | 
			
		||||
    count,
 | 
			
		||||
    cycle,
 | 
			
		||||
    groupby,
 | 
			
		||||
    islice,
 | 
			
		||||
    repeat,
 | 
			
		||||
    starmap,
 | 
			
		||||
    tee,
 | 
			
		||||
    zip_longest,
 | 
			
		||||
)
 | 
			
		||||
import operator
 | 
			
		||||
from random import randrange, sample, choice
 | 
			
		||||
 | 
			
		||||
__all__ = [
 | 
			
		||||
    'all_equal',
 | 
			
		||||
    'consume',
 | 
			
		||||
    'convolve',
 | 
			
		||||
    'dotproduct',
 | 
			
		||||
    'first_true',
 | 
			
		||||
    'flatten',
 | 
			
		||||
    'grouper',
 | 
			
		||||
    'iter_except',
 | 
			
		||||
    'ncycles',
 | 
			
		||||
    'nth',
 | 
			
		||||
    'nth_combination',
 | 
			
		||||
    'padnone',
 | 
			
		||||
    'pad_none',
 | 
			
		||||
    'pairwise',
 | 
			
		||||
    'partition',
 | 
			
		||||
    'powerset',
 | 
			
		||||
    'prepend',
 | 
			
		||||
    'quantify',
 | 
			
		||||
    'random_combination_with_replacement',
 | 
			
		||||
    'random_combination',
 | 
			
		||||
    'random_permutation',
 | 
			
		||||
    'random_product',
 | 
			
		||||
    'repeatfunc',
 | 
			
		||||
    'roundrobin',
 | 
			
		||||
    'tabulate',
 | 
			
		||||
    'tail',
 | 
			
		||||
    'take',
 | 
			
		||||
    'unique_everseen',
 | 
			
		||||
    'unique_justseen',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def take(n, iterable):
    """Return first *n* items of the iterable as a list.

        >>> take(3, range(10))
        [0, 1, 2]

    If there are fewer than *n* items in the iterable, all of them are
    returned.

        >>> take(10, range(3))
        [0, 1, 2]

    """
    head = islice(iterable, n)
    return list(head)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def tabulate(function, start=0):
    """Return an iterator yielding ``function(start)``, ``function(start + 1)``,
    ``function(start + 2)``, and so on without end.

    *function* must accept a single integer argument; *start* defaults to 0
    and advances by one for each item produced.

        >>> square = lambda x: x ** 2
        >>> iterator = tabulate(square, -3)
        >>> take(4, iterator)
        [9, 4, 1, 0]

    """
    return (function(i) for i in count(start))
def tail(n, iterable):
    """Return an iterator over the last *n* items of *iterable*.

    >>> t = tail(3, 'ABCDEFG')
    >>> list(t)
    ['E', 'F', 'G']

    """
    # A bounded deque keeps only the final n elements as the input is drained.
    window = deque(maxlen=n)
    window.extend(iterable)
    return iter(window)
def consume(iterator, n=None):
    """Advance *iterator* by *n* steps; if *n* is ``None``, exhaust it.

    Efficiently discards items without collecting them.

        >>> i = (x for x in range(10))
        >>> next(i)
        0
        >>> consume(i, 3)
        >>> next(i)
        4
        >>> consume(i)
        >>> next(i)
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        StopIteration

    If fewer than *n* items remain, the iterator is simply exhausted.
    """
    # Both branches push the iteration loop down to C speed.
    if n is None:
        # A zero-length deque swallows the whole iterator.
        deque(iterator, maxlen=0)
        return
    # Step to the empty slice that starts at position n.
    next(islice(iterator, n, n), None)
def nth(iterable, n, default=None):
    """Return the item at index *n*, or *default* if the iterable is too short.

    >>> l = range(10)
    >>> nth(l, 3)
    3
    >>> nth(l, 20, "zebra")
    'zebra'

    """
    skipped = islice(iterable, n, None)
    return next(skipped, default)
def all_equal(iterable):
    """
    Return ``True`` if every element of *iterable* equals every other.

        >>> all_equal('aaaa')
        True
        >>> all_equal('aaab')
        False

    An empty iterable is vacuously all-equal.
    """
    # groupby collapses equal runs; more than one group means a mismatch.
    groups = groupby(iterable)
    return next(groups, True) and not next(groups, False)
def quantify(iterable, pred=bool):
    """Return the sum of *pred* applied to each item (a count when *pred*
    returns booleans).

    >>> quantify([True, False, True])
    2

    """
    return sum(pred(item) for item in iterable)
def pad_none(iterable):
    """Yield the elements of *iterable* and then yield ``None`` forever.

        >>> take(5, pad_none(range(3)))
        [0, 1, 2, None, None]

    Useful for emulating the behavior of the built-in :func:`map` function.

    See also :func:`padded`.

    """
    tail_of_nones = repeat(None)
    return chain(iterable, tail_of_nones)


# Backwards-compatible alias under the historical name.
padnone = pad_none
def ncycles(iterable, n):
    """Yield the elements of *iterable* repeated *n* times over.

    >>> list(ncycles(["a", "b"], 3))
    ['a', 'b', 'a', 'b', 'a', 'b']

    """
    # Materialize once so the input may be a one-shot iterator.
    saved = tuple(iterable)
    return chain.from_iterable(repeat(saved, n))
def dotproduct(vec1, vec2):
    """Return the dot product of the two iterables.

    >>> dotproduct([10, 10], [20, 20])
    400

    """
    pairwise_products = map(operator.mul, vec1, vec2)
    return sum(pairwise_products)
def flatten(listOfLists):
    """Return an iterator that flattens exactly one level of nesting.

        >>> list(flatten([[0, 1], [2, 3]]))
        [0, 1, 2, 3]

    See also :func:`collapse`, which can flatten multiple levels of nesting.

    """
    flat = chain.from_iterable(listOfLists)
    return flat
def repeatfunc(func, times=None, *args):
    """Repeatedly call ``func(*args)``, yielding each result.

    With *times* given, the iterable stops after that many calls:

        >>> from operator import add
        >>> times = 4
        >>> args = 3, 5
        >>> list(repeatfunc(add, times, *args))
        [8, 8, 8, 8]

    With *times* as ``None``, the iterable never terminates:

        >>> from random import randrange
        >>> times = None
        >>> args = 1, 11
        >>> take(6, repeatfunc(randrange, times, *args))  # doctest:+SKIP
        [2, 4, 8, 1, 8, 4]

    """
    arg_stream = repeat(args) if times is None else repeat(args, times)
    return starmap(func, arg_stream)
def _pairwise(iterable):
    """Returns an iterator of paired items, overlapping, from the original

    >>> take(4, pairwise(count()))
    [(0, 1), (1, 2), (2, 3), (3, 4)]

    On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.

    """
    # Classic tee-based recipe: clone the iterator and advance one clone by
    # a single step, then zip the staggered pair back together.
    a, b = tee(iterable)
    next(b, None)
    yield from zip(a, b)


try:
    # Python 3.10+ provides a C implementation; prefer it when present.
    from itertools import pairwise as itertools_pairwise
except ImportError:
    # Older interpreters fall back to the pure-Python recipe above.
    pairwise = _pairwise
else:

    # Thin generator wrapper so `pairwise` stays a plain function defined in
    # this module (not a re-exported itertools name).
    def pairwise(iterable):
        yield from itertools_pairwise(iterable)

    pairwise.__doc__ = _pairwise.__doc__
def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks of size *n*, padding
    the last chunk with *fillvalue* if necessary.

    >>> list(grouper('ABCDEFG', 3, 'x'))
    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]

    """
    # Legacy calling convention put n first; honor it with a deprecation note.
    if isinstance(iterable, int):
        warnings.warn(
            "grouper expects iterable as first parameter", DeprecationWarning
        )
        n, iterable = iterable, n
    # n references to one shared iterator make zip_longest pull n at a time.
    shared = iter(iterable)
    return zip_longest(*([shared] * n), fillvalue=fillvalue)
def roundrobin(*iterables):
    """Yields an item from each iterable, alternating between them.

        >>> list(roundrobin('ABC', 'D', 'EF'))
        ['A', 'D', 'E', 'B', 'F', 'C']

    This function produces the same output as :func:`interleave_longest`, but
    may perform better for some inputs (in particular when the number of
    iterables is small).

    """
    # Recipe credited to George Sakkis.
    pending = len(iterables)
    nexts = cycle(iter(it).__next__ for it in iterables)
    while pending:
        try:
            # Fixed: the loop variable was named `next`, shadowing the
            # builtin; renamed to keep `next()` usable and avoid confusion.
            for get_next in nexts:
                yield get_next()
        except StopIteration:
            # One input ran dry: drop its bound-method from the rotation.
            pending -= 1
            nexts = cycle(islice(nexts, pending))
def partition(pred, iterable):
    """
    Split *iterable* into two iterables based on *pred*.

    The first yields the items that have ``pred(item) == False``.
    The second yields the items that have ``pred(item) == True``.

        >>> is_odd = lambda x: x % 2 != 0
        >>> iterable = range(10)
        >>> even_items, odd_items = partition(is_odd, iterable)
        >>> list(even_items), list(odd_items)
        ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])

    If *pred* is None, :func:`bool` is used.

        >>> iterable = [0, 1, False, True, '', ' ']
        >>> false_items, true_items = partition(None, iterable)
        >>> list(false_items), list(true_items)
        ([0, False, ''], [1, True, ' '])

    """
    if pred is None:
        pred = bool

    # Tag each item with its predicate result once, then fan out with tee so
    # the predicate is never evaluated twice per item.
    flagged = ((pred(item), item) for item in iterable)
    false_stream, true_stream = tee(flagged)
    falses = (item for (flag, item) in false_stream if not flag)
    trues = (item for (flag, item) in true_stream if flag)
    return (falses, trues)
def powerset(iterable):
    """Yield every subset of *iterable* as a tuple, smallest subsets first.

        >>> list(powerset([1, 2, 3]))
        [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]

    Inputs need not be :class:`set` instances, so repeated elements in the
    input produce repeated subsets in the output. Run the input through
    :func:`unique_everseen` first to avoid duplicates:

        >>> seq = [1, 1, 0]
        >>> list(powerset(seq))
        [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
        >>> from more_itertools import unique_everseen
        >>> list(powerset(unique_everseen(seq)))
        [(), (1,), (0,), (1, 0)]

    """
    pool = list(iterable)
    subset_sizes = range(len(pool) + 1)
    return chain.from_iterable(combinations(pool, size) for size in subset_sizes)
def unique_everseen(iterable, key=None):
    """
    Yield unique elements, preserving order of first occurrence.

        >>> list(unique_everseen('AAAABBBCCDAABBB'))
        ['A', 'B', 'C', 'D']
        >>> list(unique_everseen('ABBCcAD', str.lower))
        ['A', 'B', 'C', 'D']

    Sequences mixing hashable and unhashable items are supported, but the
    function degrades to `O(n^2)` for the unhashable ones.

    Remember that ``list`` objects are unhashable - you can use the *key*
    parameter to transform the list to a tuple (which is hashable) to
    avoid a slowdown.

        >>> iterable = ([1, 2], [2, 3], [1, 2])
        >>> list(unique_everseen(iterable))  # Slow
        [[1, 2], [2, 3]]
        >>> list(unique_everseen(iterable, key=tuple))  # Faster
        [[1, 2], [2, 3]]

    Similarly, you may want to convert unhashable ``set`` objects with
    ``key=frozenset``. For ``dict`` objects,
    ``key=lambda x: frozenset(x.items())`` can be used.

    """
    # Hashable keys go in a set (O(1) lookups); unhashable keys fall back to
    # a linear-scan list when the set raises TypeError.
    hashable_seen = set()
    unhashable_seen = []

    for element in iterable:
        k = element if key is None else key(element)
        try:
            if k not in hashable_seen:
                hashable_seen.add(k)
                yield element
        except TypeError:
            if k not in unhashable_seen:
                unhashable_seen.append(k)
                yield element
def unique_justseen(iterable, key=None):
    """Yield elements in order, skipping consecutive duplicates only.

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'A', 'D']

    """
    # groupby collapses each run of equal keys; take one element per run.
    return (next(run) for _, run in groupby(iterable, key))
def iter_except(func, exception, first=None):
    """Yield the results of calling *func* until *exception* is raised.

    Converts a call-until-exception interface to an iterator interface.
    Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
    to end the loop.

        >>> l = [0, 1, 2]
        >>> list(iter_except(l.pop, IndexError))
        [2, 1, 0]

    If *first* is given it is called once before the loop begins; its result
    is yielded first.
    """
    try:
        if first is not None:
            yield first()
        while True:
            yield func()
    except exception:
        # The designated exception simply terminates iteration.
        pass
def first_true(iterable, default=None, pred=None):
    """
    Return the first true value in *iterable*, or *default* if none is found.

    If *pred* is not None, returns the first item for which
    ``pred(item) == True`` .

        >>> first_true(range(10))
        1
        >>> first_true(range(10), pred=lambda x: x > 5)
        6
        >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
        'missing'

    """
    # filter(None, ...) keeps truthy items; otherwise it applies pred.
    matches = filter(pred, iterable)
    return next(matches, default)
def random_product(*args, repeat=1):
    """Draw one random item from each input iterable.

        >>> random_product('abc', range(4), 'XYZ')  # doctest:+SKIP
        ('c', 3, 'Z')

    With the *repeat* keyword argument, that many items are drawn from each
    iterable.

        >>> random_product('abcd', range(4), repeat=2)  # doctest:+SKIP
        ('a', 2, 'd', 3)

    This is equivalent to taking a random selection from
    ``itertools.product(*args, **kwarg)``.

    """
    pools = [tuple(pool) for pool in args] * repeat
    return tuple(map(choice, pools))
def random_permutation(iterable, r=None):
    """Return a random *r* length permutation of the elements in *iterable*.

    If *r* is not specified or is ``None``, *r* defaults to the length of
    *iterable*.

        >>> random_permutation(range(5))  # doctest:+SKIP
        (3, 4, 0, 1, 2)

    This is equivalent to taking a random selection from
    ``itertools.permutations(iterable, r)``.

    """
    pool = tuple(iterable)
    size = len(pool) if r is None else r
    return tuple(sample(pool, size))
def random_combination(iterable, r):
    """Return a random *r* length subsequence of the elements in *iterable*.

        >>> random_combination(range(5), 3)  # doctest:+SKIP
        (2, 3, 4)

    This is equivalent to taking a random selection from
    ``itertools.combinations(iterable, r)``.

    """
    pool = tuple(iterable)
    # Sorted index sample preserves the original relative order.
    chosen = sorted(sample(range(len(pool)), r))
    return tuple(pool[i] for i in chosen)
def random_combination_with_replacement(iterable, r):
    """Return a random *r* length subsequence of elements in *iterable*,
    allowing individual elements to be repeated.

        >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
        (0, 0, 1, 2, 2)

    This is equivalent to taking a random selection from
    ``itertools.combinations_with_replacement(iterable, r)``.

    """
    pool = tuple(iterable)
    size = len(pool)
    # Draw r indices independently; sorting keeps combination order.
    chosen = sorted(randrange(size) for _ in range(r))
    return tuple(pool[i] for i in chosen)
def nth_combination(iterable, r, index):
    """Equivalent to ``list(combinations(iterable, r))[index]``.

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`nth_combination` computes the subsequence at
    sort position *index* directly, without computing the previous
    subsequences.

        >>> nth_combination(range(5), 3, 5)
        (0, 3, 4)

    ``ValueError`` will be raised if *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = tuple(iterable)
    n = len(pool)
    if (r < 0) or (r > n):
        raise ValueError

    # Compute c = C(n, r), the total number of combinations, incrementally.
    # k = min(r, n - r) exploits the symmetry C(n, r) == C(n, n - r).
    c = 1
    k = min(r, n - r)
    for i in range(1, k + 1):
        c = c * (n - k + i) // i

    # Negative indices count back from the end, as with sequence indexing.
    if index < 0:
        index += c

    if (index < 0) or (index >= c):
        raise IndexError

    result = []
    while r:
        # Shrink the problem: c becomes C(n - 1, r - 1), the number of
        # combinations that include the current leading pool element.
        c, n, r = c * r // n, n - 1, r - 1
        # Skip whole blocks of combinations that exclude that element,
        # advancing the leading element until index falls inside a block.
        while index >= c:
            index -= c
            c, n = c * (n - r) // n, n - 1
        # pool[-1 - n] is the element selected for this position.
        result.append(pool[-1 - n])

    return tuple(result)
def prepend(value, iterator):
    """Yield *value*, followed by the elements in *iterator*.

        >>> value = '0'
        >>> iterator = ['1', '2', '3']
        >>> list(prepend(value, iterator))
        ['0', '1', '2', '3']

    To prepend multiple values, see :func:`itertools.chain`
    or :func:`value_chain`.

    """
    return chain((value,), iterator)
def convolve(signal, kernel):
    """Convolve the iterable *signal* with the iterable *kernel*.

        >>> signal = (1, 2, 3, 4, 5)
        >>> kernel = [3, 2, 1]
        >>> list(convolve(signal, kernel))
        [3, 8, 14, 20, 26, 14, 5]

    Note: the input arguments are not interchangeable, as the *kernel*
    is immediately consumed and stored.

    """
    # Reverse the kernel once so each step is a straight elementwise product.
    kernel = tuple(kernel)[::-1]
    n = len(kernel)
    # Sliding window primed with zeros; padding the tail with n - 1 zeros
    # lets the window run off the end of the signal.
    window = deque(repeat(0, n), maxlen=n)
    padded_signal = chain(signal, repeat(0, n - 1))
    for x in padded_signal:
        window.append(x)
        yield sum(map(operator.mul, kernel, window))
@@ -0,0 +1,488 @@
 | 
			
		||||
"""
 | 
			
		||||
An OrderedSet is a custom MutableSet that remembers its order, so that every
 | 
			
		||||
entry has an index that can be looked up.
 | 
			
		||||
 | 
			
		||||
Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
 | 
			
		||||
and released under the MIT license.
 | 
			
		||||
"""
 | 
			
		||||
import itertools as it
 | 
			
		||||
from collections import deque
 | 
			
		||||
 | 
			
		||||
try:
 | 
			
		||||
    # Python 3
 | 
			
		||||
    from collections.abc import MutableSet, Sequence
 | 
			
		||||
except ImportError:
 | 
			
		||||
    # Python 2.7
 | 
			
		||||
    from collections import MutableSet, Sequence
 | 
			
		||||
 | 
			
		||||
SLICE_ALL = slice(None)
 | 
			
		||||
__version__ = "3.1"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def is_iterable(obj):
    """
    Are we being asked to look up a list of things, instead of a single thing?
    We check for the `__iter__` attribute so that this can cover types that
    don't have to be known by this module, such as NumPy arrays.

    Strings, however, should be considered as atomic values to look up, not
    iterables. The same goes for tuples, since they are immutable and therefore
    valid entries.

    We don't need to check for the Python 2 `unicode` type, because it doesn't
    have an `__iter__` attribute anyway.
    """
    # Strings and tuples are atomic entries, never index collections.
    if isinstance(obj, (str, tuple)):
        return False
    return hasattr(obj, "__iter__")
class OrderedSet(MutableSet, Sequence):
 | 
			
		||||
    """
 | 
			
		||||
    An OrderedSet is a custom MutableSet that remembers its order, so that
 | 
			
		||||
    every entry has an index that can be looked up.
 | 
			
		||||
 | 
			
		||||
    Example:
 | 
			
		||||
        >>> OrderedSet([1, 1, 2, 3, 2])
 | 
			
		||||
        OrderedSet([1, 2, 3])
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    def __init__(self, iterable=None):
 | 
			
		||||
        self.items = []
 | 
			
		||||
        self.map = {}
 | 
			
		||||
        if iterable is not None:
 | 
			
		||||
            self |= iterable
 | 
			
		||||
 | 
			
		||||
    def __len__(self):
 | 
			
		||||
        """
 | 
			
		||||
        Returns the number of unique elements in the ordered set
 | 
			
		||||
 | 
			
		||||
        Example:
 | 
			
		||||
            >>> len(OrderedSet([]))
 | 
			
		||||
            0
 | 
			
		||||
            >>> len(OrderedSet([1, 2]))
 | 
			
		||||
            2
 | 
			
		||||
        """
 | 
			
		||||
        return len(self.items)
 | 
			
		||||
 | 
			
		||||
    def __getitem__(self, index):
        """
        Get the item at a given index.

        If `index` is a slice, you will get back that slice of items, as a
        new OrderedSet.

        If `index` is a list or a similar iterable, you'll get a list of
        items corresponding to those indices ("fancy indexing"). The result
        is not an OrderedSet because the requested indices may repeat, and
        the output must have one element per requested index.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset[1]
            2
        """
        # Full-slice fast path: hand back a shallow copy.
        if isinstance(index, slice) and index == SLICE_ALL:
            return self.copy()
        # Fancy indexing by an iterable of positions yields a plain list.
        if is_iterable(index):
            return [self.items[i] for i in index]
        # Integers and partial slices defer to the underlying list.
        if hasattr(index, "__index__") or isinstance(index, slice):
            result = self.items[index]
            if isinstance(result, list):
                return self.__class__(result)
            return result
        raise TypeError("Don't know how to index an OrderedSet by %r" % index)
    def copy(self):
        """
        Return a shallow copy of this object.

        Example:
            >>> this = OrderedSet([1, 2, 3])
            >>> other = this.copy()
            >>> this == other
            True
            >>> this is other
            False
        """
        duplicate = self.__class__(self)
        return duplicate
    def __getstate__(self):
 | 
			
		||||
        if len(self) == 0:
 | 
			
		||||
            # The state can't be an empty list.
 | 
			
		||||
            # We need to return a truthy value, or else __setstate__ won't be run.
 | 
			
		||||
            #
 | 
			
		||||
            # This could have been done more gracefully by always putting the state
 | 
			
		||||
            # in a tuple, but this way is backwards- and forwards- compatible with
 | 
			
		||||
            # previous versions of OrderedSet.
 | 
			
		||||
            return (None,)
 | 
			
		||||
        else:
 | 
			
		||||
            return list(self)
 | 
			
		||||
 | 
			
		||||
    def __setstate__(self, state):
 | 
			
		||||
        if state == (None,):
 | 
			
		||||
            self.__init__([])
 | 
			
		||||
        else:
 | 
			
		||||
            self.__init__(state)
 | 
			
		||||
 | 
			
		||||
    def __contains__(self, key):
 | 
			
		||||
        """
 | 
			
		||||
        Test if the item is in this ordered set
 | 
			
		||||
 | 
			
		||||
        Example:
 | 
			
		||||
            >>> 1 in OrderedSet([1, 3, 2])
 | 
			
		||||
            True
 | 
			
		||||
            >>> 5 in OrderedSet([1, 3, 2])
 | 
			
		||||
            False
 | 
			
		||||
        """
 | 
			
		||||
        return key in self.map
 | 
			
		||||
 | 
			
		||||
    def add(self, key):
 | 
			
		||||
        """
 | 
			
		||||
        Add `key` as an item to this OrderedSet, then return its index.
 | 
			
		||||
 | 
			
		||||
        If `key` is already in the OrderedSet, return the index it already
 | 
			
		||||
        had.
 | 
			
		||||
 | 
			
		||||
        Example:
 | 
			
		||||
            >>> oset = OrderedSet()
 | 
			
		||||
            >>> oset.append(3)
 | 
			
		||||
            0
 | 
			
		||||
            >>> print(oset)
 | 
			
		||||
            OrderedSet([3])
 | 
			
		||||
        """
 | 
			
		||||
        if key not in self.map:
 | 
			
		||||
            self.map[key] = len(self.items)
 | 
			
		||||
            self.items.append(key)
 | 
			
		||||
        return self.map[key]
 | 
			
		||||
 | 
			
		||||
    append = add
 | 
			
		||||
 | 
			
		||||
    def update(self, sequence):
 | 
			
		||||
        """
 | 
			
		||||
        Update the set with the given iterable sequence, then return the index
 | 
			
		||||
        of the last element inserted.
 | 
			
		||||
 | 
			
		||||
        Example:
 | 
			
		||||
            >>> oset = OrderedSet([1, 2, 3])
 | 
			
		||||
            >>> oset.update([3, 1, 5, 1, 4])
 | 
			
		||||
            4
 | 
			
		||||
            >>> print(oset)
 | 
			
		||||
            OrderedSet([1, 2, 3, 5, 4])
 | 
			
		||||
        """
 | 
			
		||||
        item_index = None
 | 
			
		||||
        try:
 | 
			
		||||
            for item in sequence:
 | 
			
		||||
                item_index = self.add(item)
 | 
			
		||||
        except TypeError:
 | 
			
		||||
            raise ValueError(
 | 
			
		||||
                "Argument needs to be an iterable, got %s" % type(sequence)
 | 
			
		||||
            )
 | 
			
		||||
        return item_index
 | 
			
		||||
 | 
			
		||||
    def index(self, key):
        """
        Get the index of a given entry, raising a KeyError-compatible lookup
        error if it's not present.

        `key` can be a non-string iterable of entries, in which case a list
        of indices is returned, one per entry.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.index(2)
            1
        """
        if is_iterable(key):
            # Fancy lookup: resolve each sub-key recursively.
            return list(map(self.index, key))
        return self.map[key]

    # Provide some compatibility with pd.Index
    get_loc = index
    get_indexer = index
    def pop(self):
        """
        Remove the most recently added element and return it.

        Raises:
            KeyError: if the set is empty.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.pop()
            3
        """
        if not self.items:
            raise KeyError("Set is empty")
        last = self.items.pop()
        del self.map[last]
        return last
 | 
			
		||||
 | 
			
		||||
    def discard(self, key):
        """
        Remove *key* from the set if present; silently do nothing otherwise.

        The MutableSet mixin builds .remove() on top of this, and .remove()
        *does* raise an error for a missing item.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.discard(2)
            >>> print(oset)
            OrderedSet([1, 3])
            >>> oset.discard(2)
            >>> print(oset)
            OrderedSet([1, 3])
        """
        if key not in self:
            return
        position = self.map.pop(key)
        del self.items[position]
        # Every surviving entry that sat after the removed one shifts left.
        for entry, idx in self.map.items():
            if idx >= position:
                self.map[entry] = idx - 1
 | 
			
		||||
 | 
			
		||||
    def clear(self):
        """
        Empty the set, discarding every stored item and its index.
        The underlying list object is reused, not replaced.
        """
        self.items.clear()
        self.map.clear()
 | 
			
		||||
 | 
			
		||||
    def __iter__(self):
        """
        Iterate items in insertion order.

        Example:
            >>> list(iter(OrderedSet([1, 2, 3])))
            [1, 2, 3]
        """
        yield from self.items
 | 
			
		||||
 | 
			
		||||
    def __reversed__(self):
        """
        Iterate items in reverse insertion order.

        Example:
            >>> list(reversed(OrderedSet([1, 2, 3])))
            [3, 2, 1]
        """
        yield from reversed(self.items)
 | 
			
		||||
 | 
			
		||||
    def __repr__(self):
        """Render as ``ClassName([items...])``, or ``ClassName()`` if empty."""
        name = self.__class__.__name__
        if self:
            return "%s(%r)" % (name, list(self))
        return "%s()" % (name,)
 | 
			
		||||
 | 
			
		||||
    def __eq__(self, other):
        """
        Return True when both containers hold the same items. If *other* is
        a Sequence the comparison is order-sensitive; otherwise order is
        ignored.

        Example:
            >>> oset = OrderedSet([1, 3, 2])
            >>> oset == [1, 3, 2]
            True
            >>> oset == [1, 2, 3]
            False
            >>> oset == [2, 3]
            False
            >>> oset == OrderedSet([3, 2, 1])
            False
        """
        # deque is not registered as a Sequence under Python 2, so it is
        # special-cased here to keep behavior consistent with Python 3.
        if isinstance(other, (Sequence, deque)):
            # Ordered comparison: same elements in the same positions.
            return list(self) == list(other)
        try:
            other_as_set = set(other)
        except TypeError:
            # `other` cannot be turned into a set, so it cannot be equal.
            return False
        return set(self) == other_as_set
 | 
			
		||||
 | 
			
		||||
    def union(self, *sets):
        """
        Return a new set holding every unique item from *self* and *sets*,
        each item positioned by its first appearance.

        Example:
            >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
            >>> print(oset)
            OrderedSet([3, 1, 4, 5, 2, 0])
            >>> oset.union([8, 9])
            OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
            >>> oset | {10}
            OrderedSet([3, 1, 4, 5, 2, 0, 10])
        """
        # Also usable unbound with a plain iterable as *self*.
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        merged = it.chain.from_iterable(map(list, it.chain([self], sets)))
        return cls(merged)
 | 
			
		||||
 | 
			
		||||
    def __and__(self, other):
        # the parent implementation of this is backwards: delegating to
        # intersection() makes the result's order follow `self` rather
        # than `other` (intersection orders by its first set).
        return self.intersection(other)
 | 
			
		||||
 | 
			
		||||
    def intersection(self, *sets):
        """
        Return the elements common to all sets. The result's order is taken
        from the first set only.

        Example:
            >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
            >>> print(oset)
            OrderedSet([1, 2, 3])
            >>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
            OrderedSet([2])
            >>> oset.intersection()
            OrderedSet([1, 2, 3])
        """
        # Also usable unbound with a plain iterable as *self*.
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        if not sets:
            return cls(self)
        common = set.intersection(*map(set, sets))
        return cls(item for item in self if item in common)
 | 
			
		||||
 | 
			
		||||
    def difference(self, *sets):
        """
        Return the elements of this set that appear in none of the others.

        Example:
            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
            OrderedSet([1, 3])
            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
            OrderedSet([1])
            >>> OrderedSet([1, 2, 3]) - OrderedSet([2])
            OrderedSet([1, 3])
            >>> OrderedSet([1, 2, 3]).difference()
            OrderedSet([1, 2, 3])
        """
        cls = self.__class__
        if not sets:
            return cls(self)
        excluded = set.union(*map(set, sets))
        return cls(item for item in self if item not in excluded)
 | 
			
		||||
 | 
			
		||||
    def issubset(self, other):
        """
        Report whether *other* contains every element of this set.

        Example:
            >>> OrderedSet([1, 2, 3]).issubset({1, 2})
            False
            >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
            True
            >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
            False
        """
        # The size comparison is a cheap early-out: a larger set can never
        # be contained in a smaller one.
        return len(self) <= len(other) and all(item in other for item in self)
 | 
			
		||||
 | 
			
		||||
    def issuperset(self, other):
        """
        Report whether this set contains every element of *other*.

        Example:
            >>> OrderedSet([1, 2]).issuperset([1, 2, 3])
            False
            >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
            True
            >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
            False
        """
        # The size comparison is a cheap early-out: a smaller set can never
        # contain a larger one.
        return len(self) >= len(other) and all(item in self for item in other)
 | 
			
		||||
 | 
			
		||||
    def symmetric_difference(self, other):
        """
        Return a new set of elements that appear in exactly one of the two
        sets. Order is preserved, with elements from *self* preceding those
        from *other*.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.symmetric_difference(other)
            OrderedSet([4, 5, 9, 2])
        """
        # Also usable unbound with plain iterables.
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        only_in_self = cls(self).difference(other)
        only_in_other = cls(other).difference(self)
        return only_in_self.union(only_in_other)
 | 
			
		||||
 | 
			
		||||
    def _update_items(self, items):
        """
        Replace self.items with *items* and rebuild self.map so every item
        maps to its position in the new list.
        """
        self.items = items
        self.map = {entry: position for position, entry in enumerate(items)}
 | 
			
		||||
 | 
			
		||||
    def difference_update(self, *sets):
        """
        Remove, in place, every item that appears in any of *sets*.

        Example:
            >>> this = OrderedSet([1, 2, 3])
            >>> this.difference_update(OrderedSet([2, 4]))
            >>> print(this)
            OrderedSet([1, 3])

            >>> this = OrderedSet([1, 2, 3, 4, 5])
            >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
            >>> print(this)
            OrderedSet([3, 5])
        """
        to_remove = set()
        for other in sets:
            to_remove.update(other)
        self._update_items([item for item in self.items if item not in to_remove])
 | 
			
		||||
 | 
			
		||||
    def intersection_update(self, other):
        """
        Keep, in place, only the items also present in *other*, preserving
        their order in this set.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.intersection_update(other)
            >>> print(this)
            OrderedSet([1, 3, 7])
        """
        keep = set(other)
        self._update_items([item for item in self.items if item in keep])
 | 
			
		||||
 | 
			
		||||
    def symmetric_difference_update(self, other):
        """
        Update in place: drop items shared with *other*, then append the
        items of *other* that were not already present.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.symmetric_difference_update(other)
            >>> print(this)
            OrderedSet([4, 5, 9, 2])
        """
        additions = [item for item in other if item not in self]
        removals = set(other)
        kept = [item for item in self.items if item not in removals]
        self._update_items(kept + additions)
 | 
			
		||||
@@ -0,0 +1,26 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

# Distribution metadata for the vendored `packaging` package; these names
# are re-exported by packaging/__init__.py.
__all__ = [
    "__title__",
    "__summary__",
    "__uri__",
    "__version__",
    "__author__",
    "__email__",
    "__license__",
    "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "21.3"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "2014-2019 %s" % __author__
 | 
			
		||||
@@ -0,0 +1,25 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

# Re-export the distribution metadata defined in __about__.py as the
# package's public API.
from .__about__ import (
    __author__,
    __copyright__,
    __email__,
    __license__,
    __summary__,
    __title__,
    __uri__,
    __version__,
)

__all__ = [
    "__title__",
    "__summary__",
    "__uri__",
    "__version__",
    "__author__",
    "__email__",
    "__license__",
    "__copyright__",
]
 | 
			
		||||
@@ -0,0 +1,301 @@
 | 
			
		||||
import collections
 | 
			
		||||
import functools
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import struct
 | 
			
		||||
import sys
 | 
			
		||||
import warnings
 | 
			
		||||
from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Python does not provide platform information at sufficient granularity to
 | 
			
		||||
# identify the architecture of the running executable in some cases, so we
 | 
			
		||||
# determine it dynamically by reading the information from the running
 | 
			
		||||
# process. This only applies on Linux, which uses the ELF format.
 | 
			
		||||
class _ELFFileHeader:
    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    # e_ident constants (magic number, word size, endianness) plus the
    # machine types and ARM ABI flag values this module needs to recognize.
    ELF_MAGIC_NUMBER = 0x7F454C46
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1
    ELFDATA2MSB = 2
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file: IO[bytes]) -> None:
        """Parse the ELF header fields from *file*, raising
        _InvalidELFFileHeader when the stream is not a well-formed ELF
        header."""
        def unpack(fmt: str) -> int:
            # Read exactly the bytes `fmt` describes and return the single
            # unpacked value; short reads surface as _InvalidELFFileHeader.
            try:
                data = file.read(struct.calcsize(fmt))
                result: Tuple[int, ...] = struct.unpack(fmt, data)
            except struct.error:
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result[0]

        # The magic number is always big-endian per the ELF spec.
        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        # Field formats for the rest of the header depend on the file's
        # endianness (e_ident_data) and word size (e_ident_class).
        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_elf_header() -> Optional[_ELFFileHeader]:
    """Parse the running interpreter's ELF header, or return None when the
    executable is unreadable or not a valid ELF file."""
    try:
        with open(sys.executable, "rb") as f:
            return _ELFFileHeader(f)
    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
        # TypeError covers sys.executable being None (e.g. embedded use).
        return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_linux_armhf() -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    header = _get_elf_header()
    if header is None:
        return False
    return (
        header.e_ident_class == header.ELFCLASS32
        and header.e_ident_data == header.ELFDATA2LSB
        and header.e_machine == header.EM_ARM
        and (header.e_flags & header.EF_ARM_ABIMASK) == header.EF_ARM_ABI_VER5
        and (header.e_flags & header.EF_ARM_ABI_FLOAT_HARD)
        == header.EF_ARM_ABI_FLOAT_HARD
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_linux_i686() -> bool:
    """True when the running interpreter is a 32-bit little-endian x86 ELF."""
    header = _get_elf_header()
    if header is None:
        return False
    return (
        header.e_ident_class == header.ELFCLASS32
        and header.e_ident_data == header.ELFDATA2LSB
        and header.e_machine == header.EM_386
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _have_compatible_abi(arch: str) -> bool:
    """Whether wheels built for *arch* can run in the current process."""
    # armv7l and i686 need an ELF-header inspection; all other supported
    # architectures are accepted by name alone.
    special_checks = {"armv7l": _is_linux_armhf, "i686": _is_linux_i686}
    check = special_checks.get(arch)
    if check is not None:
        return check()
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Maps a glibc major version to the last known minor version in that series.
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _GLibCVersion(NamedTuple):
    # A (major, minor) glibc version pair; tuple ordering provides
    # version comparison for free.
    major: int
    minor: int
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL and less likely to be
    # broken or missing; the stdlib platform module uses the same strategy.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # The returned string looks like "glibc 2.17".
        raw = os.confstr("CS_GNU_LIBC_VERSION")
        assert raw is not None
        _, version = raw.split()
        return version
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION missing, or a malformed value.
        return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) calls dlopen(NULL), which per the dlopen manpage
    # returns a handle for the main program — letting the dynamic linker
    # tell us which libc this process actually uses. A statically linked
    # interpreter (e.g. musl-based) makes dlopen() fail with an OSError
    # whose message text comes from libc itself and is not portable to
    # match on, so any failure is simply treated as "not glibc".
    try:
        main_program = ctypes.CDLL(None)
    except OSError:
        return None

    # gnu_get_libc_version is only exported by glibc; its absence means we
    # are linked against some other libc.
    getter = getattr(main_program, "gnu_get_libc_version", None)
    if getter is None:
        return None

    # gnu_get_libc_version() returns a C string such as "2.5".
    getter.restype = ctypes.c_char_p
    version: str = getter()
    if not isinstance(version, str):
        # py2 / py3 compatibility: decode bytes to str.
        version = version.decode("ascii")

    return version
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Prefer the cheap confstr probe; fall back to ctypes when it yields
    # nothing.
    result = _glibc_version_string_confstr()
    if not result:
        result = _glibc_version_string_ctypes()
    return result
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
    """Parse glibc version.

    A regexp is used instead of str.split so that any junk trailing the
    minor version is discarded — patched/forked glibcs (e.g. Linaro's)
    report strings like "2.20-2014.11". See gh-3588.

    Returns (-1, -1) (and warns) when the string is unparsable.
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str,
            RuntimeWarning,
        )
        return -1, -1
    return int(match.group("major")), int(match.group("minor"))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    """Return the running glibc version pair, (-1, -1) when glibc is absent."""
    raw = _glibc_version_string()
    return (-1, -1) if raw is None else _parse_glibc_version(raw)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# From PEP 513, PEP 600
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
    # The system glibc must be at least the tag's version. A system without
    # glibc reports (-1, -1) and therefore fails here for any real version.
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa
    except ImportError:
        # No override module installed: glibc check alone decides.
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 hook; a None result means "no opinion" and falls through
        # to the default of True.
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean overrides for the manylinux1/2010/2014 eras.
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Maps a glibc (major, minor) pair to its legacy manylinux tag name.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags(linux: str, arch: str) -> Iterator[str]:
    """Yield every manylinux tag compatible with this system, derived from
    *linux* (e.g. "linux_x86_64") and *arch*, newest glibc first; legacy
    manylinux1/2010/2014 aliases are yielded alongside their PEP 600
    equivalents. Yields nothing when the ABI is incompatible."""
    if not _have_compatible_abi(arch):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    # NOTE: the stored value is one minor *below* the oldest supported
    # version because the range() bounds used later are exclusive.
    too_old_glibc2 = _GLibCVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major versions oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
 | 
			
		||||
@@ -0,0 +1,136 @@
 | 
			
		||||
"""PEP 656 support.
 | 
			
		||||
 | 
			
		||||
This module implements logic to detect if the currently running Python is
 | 
			
		||||
linked against musl, and what musl version is used.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import contextlib
 | 
			
		||||
import functools
 | 
			
		||||
import operator
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import struct
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
from typing import IO, Iterator, NamedTuple, Optional, Tuple
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
 | 
			
		||||
    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
    """Detect musl libc location by parsing the Python executable.

    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html

    Returns the path of the ELF interpreter (dynamic loader) recorded in the
    PT_INTERP program header, or None when *f* is not a readable ELF file or
    its interpreter path does not mention "musl".
    """
    f.seek(0)
    try:
        # e_ident: 16 identification bytes at the start of every ELF file.
        ident = _read_unpacked(f, "16B")
    except struct.error:
        return None
    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
        return None
    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.

    try:
        # e_fmt: Format for program header.
        # p_fmt: Format for section header.
        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
        # Keyed on EI_CLASS (ident[4]): 1 = 32-bit ELF, 2 = 64-bit ELF.
        e_fmt, p_fmt, p_idx = {
            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
        }[ident[4]]
    except KeyError:
        # Unknown ELF class byte; bail out rather than guess the layout.
        return None
    else:
        p_get = operator.itemgetter(*p_idx)

    # Find the interpreter section and return its content.
    try:
        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
    except struct.error:
        return None
    for i in range(e_phnum + 1):
        # Seek to each program header in turn (table starts at e_phoff).
        f.seek(e_phoff + e_phentsize * i)
        try:
            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
        except struct.error:
            return None
        if p_type != 3:  # Not PT_INTERP.
            continue
        f.seek(p_offset)
        # The segment content is a NUL-terminated path string.
        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
        if "musl" not in interpreter:
            return None
        return interpreter
    return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _MuslVersion(NamedTuple):
    """A parsed musl libc version, e.g. (1, 2) for "musl 1.2.x"."""

    # First component of the version.
    major: int
    # Second component of the version.
    minor: int
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    """Extract the musl version from the dynamic loader's banner output.

    Expects at least two non-empty lines, the first starting with "musl"
    and the second of the form ``Version <major>.<minor>...``. Returns
    None when the banner does not match that shape.
    """
    lines = [stripped for stripped in map(str.strip, output.splitlines()) if stripped]
    if len(lines) < 2:
        return None
    if not lines[0].startswith("musl"):
        return None
    match = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if match is None:
        return None
    return _MuslVersion(major=int(match.group(1)), minor=int(match.group(2)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect the musl runtime version used by *executable*.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader

    Returns None when the executable cannot be opened or is not linked
    against a musl loader.
    """
    try:
        f = open(executable, "rb")
    except OSError:
        return None
    # Close the executable before invoking the loader subprocess.
    with f:
        ld = _parse_ld_musl_from_elf(f)
    if not ld:
        return None
    # Running the musl loader with no arguments prints its version banner
    # to stderr.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
    return _parse_musl_version(proc.stderr)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags(arch: str) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param arch: Should be the part of platform tag after the ``linux_``
        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
        prerequisite for the current platform to be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags, newest musl minor
        version first.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    minor = sys_musl.minor
    while minor >= 0:
        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
        minor -= 1
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Small manual-diagnostics entry point: prints the detected platform, the
# musl version of the running interpreter, and the generated tags.
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # Normalize e.g. "linux-x86-64" into the "x86_64" arch suffix form.
    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
        print(t, end="\n      ")
 | 
			
		||||
@@ -0,0 +1,61 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class InfinityType:
    """A sentinel that compares strictly greater than every other object."""

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Equal only to (any) instance of this sentinel type.
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        # Nothing exceeds Infinity.
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        # Infinity exceeds everything.
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Shared singleton instance used by version-ordering code.
Infinity = InfinityType()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class NegativeInfinityType:
    """A sentinel that compares strictly less than every other object."""

    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Equal only to (any) instance of this sentinel type.
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        # -Infinity is below everything.
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __gt__(self, other: object) -> bool:
        # Nothing is below -Infinity.
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


# Shared singleton instance used by version-ordering code.
NegativeInfinity = NegativeInfinityType()
 | 
			
		||||
@@ -0,0 +1,304 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
import operator
 | 
			
		||||
import os
 | 
			
		||||
import platform
 | 
			
		||||
import sys
 | 
			
		||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 | 
			
		||||
 | 
			
		||||
from setuptools.extern.pyparsing import (  # noqa: N817
 | 
			
		||||
    Forward,
 | 
			
		||||
    Group,
 | 
			
		||||
    Literal as L,
 | 
			
		||||
    ParseException,
 | 
			
		||||
    ParseResults,
 | 
			
		||||
    QuotedString,
 | 
			
		||||
    ZeroOrMore,
 | 
			
		||||
    stringEnd,
 | 
			
		||||
    stringStart,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from .specifiers import InvalidSpecifier, Specifier
 | 
			
		||||
 | 
			
		||||
__all__ = [
 | 
			
		||||
    "InvalidMarker",
 | 
			
		||||
    "UndefinedComparison",
 | 
			
		||||
    "UndefinedEnvironmentName",
 | 
			
		||||
    "Marker",
 | 
			
		||||
    "default_environment",
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
Operator = Callable[[str, str], bool]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised by Marker.__init__ when the marker string fails to parse.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.

    Raised by _eval_op when the operator has no string fallback.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.

    Raised by _get_env when a marker variable is missing.
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Node:
    """Base AST node of a parsed marker; wraps a single parsed value."""

    def __init__(self, value: Any) -> None:
        self.value = value

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}('{self}')>"

    def serialize(self) -> str:
        """Render this node back into marker syntax; subclasses override."""
        raise NotImplementedError


class Variable(Node):
    """An environment variable reference, e.g. ``python_version``."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal string operand; serialized inside double quotes."""

    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    """A comparison operator such as ``==``, ``in`` or ``~=``."""

    def serialize(self) -> str:
        return str(self)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# --- pyparsing grammar for PEP 508 environment markers -----------------
# NOTE: these module-level statements build mutable pyparsing objects and
# attach parse actions in order; the statement order is significant.

# All marker variable names accepted by PEP 508 (plus legacy spellings).
VARIABLE = (
    L("implementation_version")
    | L("platform_python_implementation")
    | L("implementation_name")
    | L("python_full_version")
    | L("platform_release")
    | L("platform_version")
    | L("platform_machine")
    | L("platform_system")
    | L("python_version")
    | L("sys_platform")
    | L("os_name")
    | L("os.name")  # PEP-345
    | L("sys.platform")  # PEP-345
    | L("platform.version")  # PEP-345
    | L("platform.machine")  # PEP-345
    | L("platform.python_implementation")  # PEP-345
    | L("python_implementation")  # undocumented setuptools legacy
    | L("extra")  # PEP-508
)
# Map legacy PEP-345 dotted names onto their canonical PEP 508 forms.
ALIASES = {
    "os.name": "os_name",
    "sys.platform": "sys_platform",
    "platform.version": "platform_version",
    "platform.machine": "platform_machine",
    "platform.python_implementation": "platform_python_implementation",
    "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Version comparison operators; longer literals listed first so e.g.
# "===" is not mis-tokenized as "==" followed by "=".
VERSION_CMP = (
    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals may use either quote style.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# One comparison, e.g. python_version >= "3.6"; reduced to a 3-tuple
# (lhs Node, Op, rhs Node).
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Recursive expression: items joined by and/or, with optional parentheses.
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# A complete marker must consume the entire input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
    """Recursively convert a pyparsing ParseResults tree into plain lists.

    Leaves (tuples, strings, Nodes) are returned unchanged.
    """
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(child) for child in results]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _format_marker(
    marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
) -> str:
    """Render a parsed marker (or sub-marker) back into marker syntax.

    ``first`` is True only for the outermost call, where the top-level
    group is rendered without surrounding parentheses.
    """
    assert isinstance(marker, (list, tuple, str))

    # A structure like [[...]] — a single-item list whose item is itself a
    # list/tuple — is a grouping artifact; unwrap it so no extraneous ()
    # appear on the outside.
    is_single_wrapped = (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    )
    if is_single_wrapped:
        return _format_marker(marker[0])

    if isinstance(marker, str):
        return marker
    if isinstance(marker, tuple):
        # A single comparison: "lhs op rhs" via each node's serializer.
        return " ".join(node.serialize() for node in marker)

    # Remaining case: a list of sub-expressions and boolean operators.
    rendered = [_format_marker(sub, first=False) for sub in marker]
    if first:
        return " ".join(rendered)
    return "(" + " ".join(rendered) + ")"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Fallback string-comparison implementations for marker operators, used by
# _eval_op when the operands do not form a valid PEP 440 specifier.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate one ``lhs <op> rhs`` marker comparison.

    PEP 440 specifier semantics are tried first (so version ordering and
    ``~=`` behave correctly); when ``op + rhs`` is not a valid specifier,
    fall back to the plain string operators in ``_operators``.

    Raises UndefinedComparison when neither interpretation applies.
    """
    try:
        spec = Specifier(op.serialize() + rhs)
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Undefined:
    # Sentinel type marking a name that is absent from the evaluation
    # environment (distinguishes "missing" from any legitimate str value).
    pass


# Module-private sentinel instance used as the dict.get() default.
_undefined = Undefined()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_env(environment: Dict[str, str], name: str) -> str:
    """Look up marker variable *name* in *environment*.

    Raises UndefinedEnvironmentName instead of returning a default when
    the name is absent.
    """
    value: Union[str, Undefined] = environment.get(name, _undefined)
    if not isinstance(value, Undefined):
        return value
    raise UndefinedEnvironmentName(
        f"{name!r} does not exist in evaluation environment."
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
    """Evaluate a parsed marker tree against *environment*.

    The flat marker list alternates expressions with "and"/"or" strings:
    "and" keeps extending the current group while "or" starts a new one,
    so the result is an OR over groups of the AND within each group.
    """
    groups: List[List[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker
            # Exactly one side is a Variable; substitute its value from
            # the environment and leave the literal side as-is.
            if isinstance(lhs, Variable):
                outcome = _eval_op(_get_env(environment, lhs.value), op, rhs.value)
            else:
                outcome = _eval_op(lhs.value, op, _get_env(environment, rhs.value))
            groups[-1].append(outcome)
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(group) for group in groups)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def format_full_version(info: "sys._version_info") -> str:
    """Format a ``sys.implementation.version``-style struct as a string.

    Produces "major.minor.micro" with a suffix such as "b2" or "rc1"
    appended for non-final release levels.
    """
    version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        version = f"{version}{info.releaselevel[0]}{info.serial}"
    return version
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def default_environment() -> Dict[str, str]:
    """Build the PEP 508 marker environment for the running interpreter.

    Every value is a string, derived from ``sys``, ``os`` and ``platform``.
    Note that "extra" is deliberately absent; callers supply it via the
    ``environment`` override when evaluating a marker.
    """
    iver = format_full_version(sys.implementation.version)
    implementation_name = sys.implementation.name
    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Marker:
    """A parsed PEP 508 environment marker, e.g. ``python_version >= "3.6"``.

    Parsing happens eagerly in the constructor; evaluation against an
    environment is deferred to :meth:`evaluate`.
    """

    def __init__(self, marker: str) -> None:
        # Parse the raw string into a nested list/tuple AST of Node objects.
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            # e.loc points at the failing offset; show an 8-char excerpt.
            raise InvalidMarker(
                f"Invalid marker: {marker!r}, parse error at "
                f"{marker[e.loc : e.loc + 8]!r}"
            )

    def __str__(self) -> str:
        # Re-serialize the AST; whitespace is normalized, not preserved.
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            # Caller-supplied values (e.g. "extra") win over detected ones.
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
 | 
			
		||||
@@ -0,0 +1,146 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
import re
 | 
			
		||||
import string
 | 
			
		||||
import urllib.parse
 | 
			
		||||
from typing import List, Optional as TOptional, Set
 | 
			
		||||
 | 
			
		||||
from setuptools.extern.pyparsing import (  # noqa
 | 
			
		||||
    Combine,
 | 
			
		||||
    Literal as L,
 | 
			
		||||
    Optional,
 | 
			
		||||
    ParseException,
 | 
			
		||||
    Regex,
 | 
			
		||||
    Word,
 | 
			
		||||
    ZeroOrMore,
 | 
			
		||||
    originalTextFor,
 | 
			
		||||
    stringEnd,
 | 
			
		||||
    stringStart,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from .markers import MARKER_EXPR, Marker
 | 
			
		||||
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.

    Raised by Requirement.__init__ on parse failure or an invalid URL.
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# --- pyparsing grammar for PEP 508 requirement strings ------------------
# Statement order is significant: parse actions are attached to mutable
# grammar objects as they are built.

ALPHANUM = Word(string.ascii_letters + string.digits)

# Punctuation tokens; .suppress() drops them from the parse results.
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

# A project/extra name: alphanumeric runs optionally joined by -, _ or .,
# never starting or ending on punctuation.
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

# Direct-reference URL: everything after "@" up to the next space.
URI = Regex(r"[^ ]+")("url")
URL = AT + URI

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Version clauses: PEP 440 specifiers or legacy (setuptools) specifiers.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")

# Keep the specifier's original text (parentheses stripped by the action).
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Rebind the imported marker grammar: capture the original marker text and
# construct a Marker object from it.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            # NOTE(review): the stray space after the opening quote and the
            # mix of literal quotes with !r look unintentional, but the
            # message text is runtime behavior — left as-is.
            raise InvalidRequirement(
                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
            )

        self.name: str = req.name
        if req.url:
            parsed_url = urllib.parse.urlparse(req.url)
            if parsed_url.scheme == "file":
                # file: URLs must round-trip unchanged through urlunparse.
                if urllib.parse.urlunparse(parsed_url) != req.url:
                    raise InvalidRequirement("Invalid URL given")
            # NOTE(review): the second operand of this `or` can only be
            # reached when scheme and netloc are both truthy, so it is
            # always False there — it looks redundant; confirm upstream
            # before simplifying.
            elif not (parsed_url.scheme and parsed_url.netloc) or (
                not parsed_url.scheme and not parsed_url.netloc
            ):
                raise InvalidRequirement(f"Invalid URL: {req.url}")
            self.url: TOptional[str] = req.url
        else:
            self.url = None
        # Extras come back as a pyparsing result list; empty set if absent.
        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
        self.marker: TOptional[Marker] = req.marker if req.marker else None

    def __str__(self) -> str:
        """Re-serialize into canonical PEP 508 form: name[extras]spec @ url ; marker."""
        parts: List[str] = [self.name]

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            parts.append(f"[{formatted_extras}]")

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append(f"@ {self.url}")
            if self.marker:
                # A space is required between the URL and the ";" separator.
                parts.append(" ")

        if self.marker:
            parts.append(f"; {self.marker}")

        return "".join(parts)

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"
 | 
			
		||||
@@ -0,0 +1,802 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
import abc
 | 
			
		||||
import functools
 | 
			
		||||
import itertools
 | 
			
		||||
import re
 | 
			
		||||
import warnings
 | 
			
		||||
from typing import (
 | 
			
		||||
    Callable,
 | 
			
		||||
    Dict,
 | 
			
		||||
    Iterable,
 | 
			
		||||
    Iterator,
 | 
			
		||||
    List,
 | 
			
		||||
    Optional,
 | 
			
		||||
    Pattern,
 | 
			
		||||
    Set,
 | 
			
		||||
    Tuple,
 | 
			
		||||
    TypeVar,
 | 
			
		||||
    Union,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from .utils import canonicalize_version
 | 
			
		||||
from .version import LegacyVersion, Version, parse
 | 
			
		||||
 | 
			
		||||
ParsedVersion = Union[Version, LegacyVersion]
 | 
			
		||||
UnparsedVersion = Union[Version, LegacyVersion, str]
 | 
			
		||||
VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
 | 
			
		||||
CallableOperator = Callable[[ParsedVersion, str], bool]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.

    Raised by specifier constructors when the input string fails to match
    the specifier regex.
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface shared by individual specifiers and SpecifierSet."""

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    # NOTE(review): abc.abstractproperty is deprecated in favor of stacking
    # @property with @abc.abstractmethod; kept as-is to avoid touching the
    # paired setter below.
    @abc.abstractproperty
    def prereleases(self) -> Optional[bool]:
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation for a single ``<operator><version>`` specifier.

    Concrete subclasses supply ``_regex`` (the grammar used to parse the
    spec string) and ``_operators`` (mapping operator text to the suffix of
    the ``_compare_*`` method that implements it).
    """

    # Operator text (e.g. "==") -> suffix of the _compare_* method to call.
    _operators: Dict[str, str] = {}
    # Compiled parse pattern; provided by each concrete subclass.
    _regex: Pattern[str]

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        """Parse *spec* into an (operator, version) pair.

        :raises InvalidSpecifier: if *spec* does not match ``_regex``.
        """
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        # Keep only the two named groups, stripped of surrounding whitespace.
        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self) -> str:
        """Debug representation, including an explicit prereleases override."""
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """Render back to the canonical ``<operator><version>`` text."""
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        # Canonicalize the version part so that e.g. equivalent spellings
        # hash and compare equal.
        return self._spec[0], canonicalize_version(self._spec[1])

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        """Compare by canonical spec; strings are parsed with this class."""
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        """Look up the bound ``_compare_*`` method for operator text *op*."""
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
        """Parse *version* unless it is already a parsed version object."""
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self) -> str:
        """The operator portion of the specifier, e.g. ``">="``."""
        return self._spec[0]

    @property
    def version(self) -> str:
        """The version portion of the specifier, e.g. ``"1.0"``."""
        return self._spec[1]

    @property
    def prereleases(self) -> Optional[bool]:
        # None means "no explicit setting"; see the contains()/filter() logic.
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: str) -> bool:
        # Enables the ``"2.0" in specifier`` shorthand.
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True if *item* satisfies this specifier.

        :param prereleases: optional override; defaults to this specifier's
            own pre-release policy.
        """

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion; this allows us to
        # have a shortcut for ``"2.0" in Specifier(">=2")``.
        normalized_item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not. If we do not support prereleases then we can short-circuit
        # whenever this version is a prerelease.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Yield the items of *iterable* that satisfy this specifier.

        Pre-releases that match are withheld unless allowed, but are yielded
        as a fallback when nothing else matched (generator).
        """

        yielded = False
        found_prereleases = []

        # Pass prereleases=True through to contains() so pre-release matches
        # are collected here; the yield/withhold decision is made below.
        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LegacySpecifier(_IndividualSpecifier):
    """A specifier using the pre-PEP 440 ("legacy") version grammar.

    Deprecated: creating one emits a ``DeprecationWarning``.  All
    comparisons coerce both sides to :class:`LegacyVersion` (see
    ``_coerce_version``) and delegate to its rich comparisons.
    """

    _regex_str = r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> suffix of the _compare_* method implementing it.
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        super().__init__(spec, prereleases)

        # Fix: the message previously said "LegacyVersion" even though this
        # warning fires when a LegacySpecifier is created; stacklevel=2 makes
        # the warning point at the caller rather than at this __init__.
        warnings.warn(
            "Creating a LegacySpecifier has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
            stacklevel=2,
        )

    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
        """Coerce *version* to a LegacyVersion for legacy-style comparison."""
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    # Each _compare_* method receives the already-coerced prospective version
    # plus the raw spec string, and applies the matching rich comparison.

    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(
        self, prospective: LegacyVersion, spec: str
    ) -> bool:
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective > self._coerce_version(spec)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _require_version_compare(
    fn: Callable[["Specifier", ParsedVersion, str], bool]
) -> Callable[["Specifier", ParsedVersion, str], bool]:
    """Wrap a Specifier comparison method so it only applies to PEP 440 versions.

    The returned wrapper yields ``False`` outright whenever the prospective
    version is not a :class:`Version` instance; otherwise it defers to *fn*.
    """

    @functools.wraps(fn)
    def checked(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
        # Non-PEP 440 versions can never satisfy a PEP 440 operator, so the
        # guard and the delegated call collapse into one boolean expression.
        return isinstance(prospective, Version) and fn(self, prospective, spec)

    return checked
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Specifier(_IndividualSpecifier):
    """A PEP 440 version specifier, e.g. ``">=1.0"`` or ``"~=2.2"``.

    The grammar below (``_regex_str``) encodes the operator-specific rules:
    ``===`` accepts any non-whitespace text, ``==``/``!=`` additionally allow
    local versions and ``.*`` prefix wildcards, ``~=`` requires at least two
    release digits, and the ordering operators accept plain PEP 440 versions
    only.  All ``_compare_*`` methods are guarded by
    ``@_require_version_compare`` so they return False for non-PEP 440
    versions.
    """

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> suffix of the _compare_* method implementing it.
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``~=`` by combining the ``>=`` and ``==`` operators."""

        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = ".".join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    @_require_version_compare
    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``==``, including the ``.*`` prefix-match form."""

        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            split_spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            split_prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = split_prospective[: len(split_spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )

            return padded_prospective == padded_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

    @_require_version_compare
    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(
        self, prospective: ParsedVersion, spec: str
    ) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """Implement ``<`` with the pre-release special case below."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """Implement ``>`` with the post-release and local-version special cases."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        # ``===``: strict case-insensitive string identity, no version
        # semantics at all.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self) -> bool:
        """Whether this specifier allows pre-releases.

        An explicit setting wins; otherwise inclusive operators whose
        version text is itself a pre-release imply True.
        """

        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Matches a segment like "1rc2", capturing the numeric part and the
# pre-release tag separately (used by _version_split to insert an
# implicit dot between them).
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _version_split(version: str) -> List[str]:
    """Split *version* on dots, also separating fused pre-release tags.

    A segment such as ``"1rc2"`` expands to ``["1", "rc2"]``, as if an
    implicit dot sat between the release number and the pre-release tag.
    """
    parts: List[str] = []
    for segment in version.split("."):
        prefix_match = _prefix_regex.search(segment)
        # A matching segment contributes its two captured pieces; anything
        # else passes through unchanged.
        parts.extend(prefix_match.groups() if prefix_match else (segment,))
    return parts
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_not_suffix(segment: str) -> bool:
 | 
			
		||||
    return not any(
 | 
			
		||||
        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
 | 
			
		||||
    left_split, right_split = [], []
 | 
			
		||||
 | 
			
		||||
    # Get the release segment of our versions
 | 
			
		||||
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
 | 
			
		||||
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
 | 
			
		||||
 | 
			
		||||
    # Get the rest of our versions
 | 
			
		||||
    left_split.append(left[len(left_split[0]) :])
 | 
			
		||||
    right_split.append(right[len(right_split[0]) :])
 | 
			
		||||
 | 
			
		||||
    # Insert our padding
 | 
			
		||||
    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
 | 
			
		||||
    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
 | 
			
		||||
 | 
			
		||||
    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SpecifierSet(BaseSpecifier):
    """A set of version specifiers (e.g. ">=1.0,<2.0,!=1.5") that are
    logically ANDed together when testing candidate versions.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Parse a comma-separated *specifiers* string.

        ``prereleases`` explicitly allows (True) or forbids (False)
        pre-release versions; ``None`` defers to the individual specifiers.
        """
        # Split on , to break each individual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed: Set[_IndividualSpecifier] = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self) -> str:
        # Only surface the prereleases override when one was set explicitly.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        # Sorted so the string form is deterministic regardless of set order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return the intersection (union of constraints) of two sets."""
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Combine the prereleases overrides: an explicit value wins over None,
        # matching explicit values are kept, and conflicting explicit values
        # (True vs False) cannot be reconciled.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        # Strings and individual specifiers are coerced before comparison;
        # equality ignores any prereleases override.
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> Iterator[_IndividualSpecifier]:
        return iter(self._specs)

    @property
    def prereleases(self) -> Optional[bool]:
        """Whether this set should match pre-release versions (None = unknown)."""
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        # Explicit override; takes precedence over per-specifier inference.
        self._prereleases = value

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True if *item* satisfies every specifier in the set.

        ``prereleases`` overrides the set-level prerelease policy for this
        single call.
        """
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Filter *iterable* down to the items matching every specifier.

        ``prereleases`` overrides the set-level prerelease policy for this
        single call.
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered: List[VersionTypeVar] = []
            found_prereleases: List[VersionTypeVar] = []

            item: UnparsedVersion
            parsed_version: Union[Version, LegacyVersion]

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
 | 
			
		||||
@@ -0,0 +1,487 @@
 | 
			
		||||
# This file is dual licensed under the terms of the Apache License, Version
 | 
			
		||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
 | 
			
		||||
# for complete details.
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
import platform
 | 
			
		||||
import sys
 | 
			
		||||
import sysconfig
 | 
			
		||||
from importlib.machinery import EXTENSION_SUFFIXES
 | 
			
		||||
from typing import (
 | 
			
		||||
    Dict,
 | 
			
		||||
    FrozenSet,
 | 
			
		||||
    Iterable,
 | 
			
		||||
    Iterator,
 | 
			
		||||
    List,
 | 
			
		||||
    Optional,
 | 
			
		||||
    Sequence,
 | 
			
		||||
    Tuple,
 | 
			
		||||
    Union,
 | 
			
		||||
    cast,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
from . import _manylinux, _musllinux
 | 
			
		||||
 | 
			
		||||
logger = logging.getLogger(__name__)

# Type aliases used throughout this module.
PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]

# Maps sys.implementation.name to the short form used in wheel tags.
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when this interpreter has a 32-bit pointer size (affects platform tags).
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Sets of tags are probed constantly (e.g. set.isdisjoint() while
        # scanning pages of package links), so the hash is computed once up
        # front instead of on every lookup — a significant downstream speedup.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # The cached hash is the cheapest check and almost always decides
        # inequality immediately; the field comparisons confirm a match.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    # Each of the three fields may itself be a "."-compressed list; the full
    # tag set is the cross product of all three.
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
 | 
			
		||||
    value = sysconfig.get_config_var(name)
 | 
			
		||||
    if value is None and warn:
 | 
			
		||||
        logger.debug(
 | 
			
		||||
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
 | 
			
		||||
        )
 | 
			
		||||
    return value
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _normalize_string(string: str) -> str:
 | 
			
		||||
    return string.replace(".", "_").replace("-", "_")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _abi3_applies(python_version: PythonVersion) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2.
    """
    # A major-only version (e.g. (3,)) is too vague to claim abi3 support.
    if len(python_version) <= 1:
        return False
    return tuple(python_version) >= (3, 2)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Return the ABI tags for a CPython of *py_version*, most specific first."""
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        # The "m" (pymalloc) and "u" (wide-unicode) ABI flags only exist on
        # older CPython versions; both were dropped by 3.8 / 3.3 respectively.
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    # The fully-qualified ABI tag always goes first (highest priority).
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            # Major-only version: no concrete ABI can be derived.
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        # abi3 wheels built for older minor releases remain loadable, so walk
        # the minor versions down to 3.2 (where PEP 384 was introduced).
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _generic_abi() -> Iterator[str]:
    """Yield the interpreter's SOABI-based ABI tag, if one is configured."""
    soabi = sysconfig.get_config_var("SOABI")
    if soabi:
        yield _normalize_string(soabi)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    platforms = list(platforms or platform_tags())
    abis = list(abis)
    # "none" matches anything, so it is tried after all specific ABIs.
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if has_minor:
        for older_minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], older_minor))}"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    # Materialize the version range once; it is iterated twice below.
    versions = list(_py_interpreter_range(python_version))

    for version in versions:
        for platform_ in platform_list:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in versions:
        yield Tag(version, "none", "any")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map the reported *arch* to the one this interpreter can actually run."""
    if not is_32bit:
        return arch
    # A 32-bit interpreter on 64-bit hardware runs the 32-bit sibling arch.
    return "ppc" if arch.startswith("ppc") else "i386"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """Return the binary formats usable for *cpu_arch* on macOS *version*,
    most specific first; empty when the pairing is impossible.
    """
    # Per-architecture constraints: (minimum macOS version or None,
    # maximum macOS version or None, multi-arch "fat" formats supported).
    # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
    fat_support = {
        "x86_64": ((10, 4), None, ["intel", "fat64", "fat32"]),
        "i386": ((10, 4), None, ["intel", "fat32", "fat"]),
        "ppc64": ((10, 4), (10, 5), ["fat64"]),
        "ppc": (None, (10, 6), ["fat32", "fat"]),
    }

    formats = [cpu_arch]
    if cpu_arch in fat_support:
        minimum, maximum, fat_formats = fat_support[cpu_arch]
        if minimum is not None and version < minimum:
            return []
        if maximum is not None and version > maximum:
            return []
        formats.extend(fat_formats)

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        # Only the major.minor components are relevant for tag generation.
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.   The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux first, then musllinux, then the
    raw (normalized) sysconfig platform tag.
    """
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the 64-bit
        # platform; substitute the 32-bit equivalent.
        remap = {"linux_x86_64": "linux_i686", "linux_aarch64": "linux_armv7l"}
        linux = remap.get(linux, linux)
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _generic_platforms() -> Iterator[str]:
    # Fall back to the normalized sysconfig platform (e.g. "win_amd64").
    yield _normalize_string(sysconfig.get_platform())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.
    """
    # Prefer the conventional two-letter short form where one is defined.
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name, name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    config_version = _get_config_var("py_version_nodot", warn=warn)
    if config_version:
        return str(config_version)
    # The config variable is unset on some builds; derive the value directly.
    return _version_nodot(sys.version_info[:2])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _version_nodot(version: PythonVersion) -> str:
    """Join version components without separators, e.g. (3, 11) -> "311"."""
    return "".join(str(part) for part in version)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    interp_name = interpreter_name()

    # Interpreter-specific tags first (highest priority)...
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # ...then the generic "py*" compatibility tags.
    yield from compatible_tags(interpreter="pp3" if interp_name == "pp" else None)
 | 
			
		||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user