author    Nan Zhang <nanzhang@google.com>  2018-05-24 18:59:04 -0700
committer android-build-merger <android-build-merger@google.com>  2018-05-24 18:59:04 -0700
commit    a3209a1079bbe0d57820a93b1d2153719818c259 (patch)
tree      7f63dc937b886389e53ee3b386c70baebd1da8e1 /setuptools
parent    e493fcf90483ade4b2a5ac331a4f89cb15ae3fd2 (diff)
parent    21d497cb588c40b2a891d9a862f56e108b427e4c (diff)
download  setuptools-a3209a1079bbe0d57820a93b1d2153719818c259.tar.gz
initial drop of setuptools v39.1.0 am: 8539a2ae46 am: c498e8445a am: 21d497cb58

Change-Id: I0d9ec0d7323b37baa8a7550c6aec27e05af57415
Diffstat (limited to 'setuptools')
-rw-r--r--  setuptools/__init__.py  180
-rw-r--r--  setuptools/_vendor/__init__.py  0
-rw-r--r--  setuptools/_vendor/__pycache__/__init__.cpython-36.pyc  bin 0 -> 148 bytes
-rw-r--r--  setuptools/_vendor/__pycache__/six.cpython-36.pyc  bin 0 -> 24445 bytes
-rw-r--r--  setuptools/_vendor/packaging/__about__.py  21
-rw-r--r--  setuptools/_vendor/packaging/__init__.py  14
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc  bin 0 -> 684 bytes
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc  bin 0 -> 522 bytes
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc  bin 0 -> 969 bytes
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc  bin 0 -> 2826 bytes
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc  bin 0 -> 19788 bytes
-rw-r--r--  setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc  bin 0 -> 10563 bytes
-rw-r--r--  setuptools/_vendor/packaging/_compat.py  30
-rw-r--r--  setuptools/_vendor/packaging/_structures.py  68
-rw-r--r--  setuptools/_vendor/packaging/markers.py  301
-rw-r--r--  setuptools/_vendor/packaging/requirements.py  127
-rw-r--r--  setuptools/_vendor/packaging/specifiers.py  774
-rw-r--r--  setuptools/_vendor/packaging/utils.py  14
-rw-r--r--  setuptools/_vendor/packaging/version.py  393
-rw-r--r--  setuptools/_vendor/pyparsing.py  5696
-rw-r--r--  setuptools/_vendor/six.py  868
-rw-r--r--  setuptools/_vendor/vendored.txt  3
-rwxr-xr-x  setuptools/archive_util.py  173
-rw-r--r--  setuptools/build_meta.py  172
-rw-r--r--  setuptools/cli-32.exe  bin 0 -> 65536 bytes
-rw-r--r--  setuptools/cli-64.exe  bin 0 -> 74752 bytes
-rw-r--r--  setuptools/cli.exe  bin 0 -> 65536 bytes
-rw-r--r--  setuptools/command/__init__.py  18
-rwxr-xr-x  setuptools/command/alias.py  80
-rw-r--r--  setuptools/command/bdist_egg.py  502
-rwxr-xr-x  setuptools/command/bdist_rpm.py  43
-rwxr-xr-x  setuptools/command/bdist_wininst.py  21
-rw-r--r--  setuptools/command/build_clib.py  98
-rw-r--r--  setuptools/command/build_ext.py  331
-rw-r--r--  setuptools/command/build_py.py  270
-rwxr-xr-x  setuptools/command/develop.py  216
-rw-r--r--  setuptools/command/dist_info.py  36
-rwxr-xr-x  setuptools/command/easy_install.py  2334
-rwxr-xr-x  setuptools/command/egg_info.py  696
-rw-r--r--  setuptools/command/install.py  125
-rwxr-xr-x  setuptools/command/install_egg_info.py  62
-rw-r--r--  setuptools/command/install_lib.py  121
-rwxr-xr-x  setuptools/command/install_scripts.py  65
-rw-r--r--  setuptools/command/launcher manifest.xml  15
-rw-r--r--  setuptools/command/py36compat.py  136
-rwxr-xr-x  setuptools/command/register.py  10
-rwxr-xr-x  setuptools/command/rotate.py  66
-rwxr-xr-x  setuptools/command/saveopts.py  22
-rwxr-xr-x  setuptools/command/sdist.py  200
-rwxr-xr-x  setuptools/command/setopt.py  149
-rw-r--r--  setuptools/command/test.py  268
-rw-r--r--  setuptools/command/upload.py  42
-rw-r--r--  setuptools/command/upload_docs.py  206
-rw-r--r--  setuptools/config.py  556
-rw-r--r--  setuptools/dep_util.py  23
-rw-r--r--  setuptools/depends.py  186
-rw-r--r--  setuptools/dist.py  1061
-rw-r--r--  setuptools/extension.py  57
-rw-r--r--  setuptools/extern/__init__.py  73
-rw-r--r--  setuptools/glibc.py  86
-rw-r--r--  setuptools/glob.py  176
-rw-r--r--  setuptools/gui-32.exe  bin 0 -> 65536 bytes
-rw-r--r--  setuptools/gui-64.exe  bin 0 -> 75264 bytes
-rw-r--r--  setuptools/gui.exe  bin 0 -> 65536 bytes
-rw-r--r--  setuptools/launch.py  35
-rw-r--r--  setuptools/lib2to3_ex.py  62
-rw-r--r--  setuptools/monkey.py  181
-rw-r--r--  setuptools/msvc.py  1302
-rwxr-xr-x  setuptools/namespaces.py  107
-rwxr-xr-x  setuptools/package_index.py  1119
-rw-r--r--  setuptools/pep425tags.py  317
-rw-r--r--  setuptools/py27compat.py  28
-rw-r--r--  setuptools/py31compat.py  41
-rw-r--r--  setuptools/py33compat.py  54
-rw-r--r--  setuptools/py36compat.py  82
-rwxr-xr-x  setuptools/sandbox.py  491
-rw-r--r--  setuptools/script (dev).tmpl  5
-rw-r--r--  setuptools/script.tmpl  3
-rw-r--r--  setuptools/site-patch.py  74
-rw-r--r--  setuptools/ssl_support.py  260
-rw-r--r--  setuptools/tests/__init__.py  6
-rw-r--r--  setuptools/tests/contexts.py  98
-rw-r--r--  setuptools/tests/environment.py  60
-rw-r--r--  setuptools/tests/files.py  39
-rw-r--r--  setuptools/tests/fixtures.py  23
-rw-r--r--  setuptools/tests/indexes/test_links_priority/external.html  3
-rw-r--r--  setuptools/tests/indexes/test_links_priority/simple/foobar/index.html  4
-rw-r--r--  setuptools/tests/mod_with_constant.py  1
-rw-r--r--  setuptools/tests/namespaces.py  42
-rw-r--r--  setuptools/tests/script-with-bom.py  3
-rw-r--r--  setuptools/tests/server.py  72
-rw-r--r--  setuptools/tests/test_archive_util.py  42
-rw-r--r--  setuptools/tests/test_bdist_egg.py  66
-rw-r--r--  setuptools/tests/test_build_clib.py  59
-rw-r--r--  setuptools/tests/test_build_ext.py  45
-rw-r--r--  setuptools/tests/test_build_meta.py  126
-rw-r--r--  setuptools/tests/test_build_py.py  30
-rw-r--r--  setuptools/tests/test_config.py  583
-rw-r--r--  setuptools/tests/test_dep_util.py  30
-rw-r--r--  setuptools/tests/test_depends.py  16
-rw-r--r--  setuptools/tests/test_develop.py  202
-rw-r--r--  setuptools/tests/test_dist.py  145
-rw-r--r--  setuptools/tests/test_dist_info.py  78
-rw-r--r--  setuptools/tests/test_easy_install.py  760
-rw-r--r--  setuptools/tests/test_egg_info.py  598
-rw-r--r--  setuptools/tests/test_find_packages.py  182
-rw-r--r--  setuptools/tests/test_install_scripts.py  88
-rw-r--r--  setuptools/tests/test_integration.py  165
-rw-r--r--  setuptools/tests/test_manifest.py  602
-rw-r--r--  setuptools/tests/test_msvc.py  178
-rw-r--r--  setuptools/tests/test_namespaces.py  111
-rw-r--r--  setuptools/tests/test_packageindex.py  275
-rw-r--r--  setuptools/tests/test_sandbox.py  133
-rw-r--r--  setuptools/tests/test_sdist.py  427
-rw-r--r--  setuptools/tests/test_setuptools.py  368
-rw-r--r--  setuptools/tests/test_test.py  131
-rw-r--r--  setuptools/tests/test_unicode_utils.py  10
-rw-r--r--  setuptools/tests/test_upload_docs.py  71
-rw-r--r--  setuptools/tests/test_virtualenv.py  139
-rw-r--r--  setuptools/tests/test_wheel.py  508
-rw-r--r--  setuptools/tests/test_windows_wrappers.py  181
-rw-r--r--  setuptools/tests/text.py  9
-rw-r--r--  setuptools/tests/textwrap.py  8
-rw-r--r--  setuptools/unicode_utils.py  44
-rw-r--r--  setuptools/version.py  6
-rw-r--r--  setuptools/wheel.py  163
-rw-r--r--  setuptools/windows_support.py  29
127 files changed, 28304 insertions, 0 deletions
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
new file mode 100644
index 0000000..7da47fb
--- /dev/null
+++ b/setuptools/__init__.py
@@ -0,0 +1,180 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+
+import os
+import functools
+import distutils.core
+import distutils.filelist
+from distutils.util import convert_path
+from fnmatch import fnmatchcase
+
+from setuptools.extern.six.moves import filter, map
+
+import setuptools.version
+from setuptools.extension import Extension
+from setuptools.dist import Distribution, Feature
+from setuptools.depends import Require
+from . import monkey
+
+__all__ = [
+ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+ 'find_packages',
+]
+
+__version__ = setuptools.version.__version__
+
+bootstrap_install_from = None
+
+# If we run 2to3 on .py files, should we also convert docstrings?
+# Default: yes; assume that we can detect doctests reliably
+run_2to3_on_doctests = True
+# Standard package names for fixer packages
+lib2to3_fixer_packages = ['lib2to3.fixes']
+
+
+class PackageFinder(object):
+ """
+ Generate a list of all Python packages found within a directory
+ """
+
+ @classmethod
+ def find(cls, where='.', exclude=(), include=('*',)):
+ """Return a list all Python packages found within directory 'where'
+
+ 'where' is the root directory which will be searched for packages. It
+ should be supplied as a "cross-platform" (i.e. URL-style) path; it will
+ be converted to the appropriate local path syntax.
+
+ 'exclude' is a sequence of package names to exclude; '*' can be used
+ as a wildcard in the names, such that 'foo.*' will exclude all
+ subpackages of 'foo' (but not 'foo' itself).
+
+ 'include' is a sequence of package names to include. If it's
+ specified, only the named packages will be included. If it's not
+ specified, all found packages will be included. 'include' can contain
+ shell style wildcard patterns just like 'exclude'.
+ """
+
+ return list(cls._find_packages_iter(
+ convert_path(where),
+ cls._build_filter('ez_setup', '*__pycache__', *exclude),
+ cls._build_filter(*include)))
+
+ @classmethod
+ def _find_packages_iter(cls, where, exclude, include):
+ """
+ All the packages found in 'where' that pass the 'include' filter, but
+ not the 'exclude' filter.
+ """
+ for root, dirs, files in os.walk(where, followlinks=True):
+ # Copy dirs so we can iterate over the copy, then empty dirs in place.
+ all_dirs = dirs[:]
+ dirs[:] = []
+
+ for dir in all_dirs:
+ full_path = os.path.join(root, dir)
+ rel_path = os.path.relpath(full_path, where)
+ package = rel_path.replace(os.path.sep, '.')
+
+ # Skip directory trees that are not valid packages
+ if ('.' in dir or not cls._looks_like_package(full_path)):
+ continue
+
+ # Should this package be included?
+ if include(package) and not exclude(package):
+ yield package
+
+ # Keep searching subdirectories, as there may be more packages
+ # down there, even if the parent was excluded.
+ dirs.append(dir)
+
+ @staticmethod
+ def _looks_like_package(path):
+ """Does a directory look like a package?"""
+ return os.path.isfile(os.path.join(path, '__init__.py'))
+
+ @staticmethod
+ def _build_filter(*patterns):
+ """
+ Given a list of patterns, return a callable that will be true only if
+ the input matches at least one of the patterns.
+ """
+ return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
+
+
+class PEP420PackageFinder(PackageFinder):
+ @staticmethod
+ def _looks_like_package(path):
+ return True
+
+
+find_packages = PackageFinder.find
+
+
+def _install_setup_requires(attrs):
+ # Note: do not use `setuptools.Distribution` directly, as
+ # our PEP 517 backend patches `distutils.core.Distribution`.
+ dist = distutils.core.Distribution(dict(
+ (k, v) for k, v in attrs.items()
+ if k in ('dependency_links', 'setup_requires')
+ ))
+ # Honor setup.cfg's options.
+ dist.parse_config_files(ignore_option_errors=True)
+ if dist.setup_requires:
+ dist.fetch_build_eggs(dist.setup_requires)
+
+
+def setup(**attrs):
+ # Make sure we have any requirements needed to interpret 'attrs'.
+ _install_setup_requires(attrs)
+ return distutils.core.setup(**attrs)
+
+setup.__doc__ = distutils.core.setup.__doc__
+
+
+_Command = monkey.get_unpatched(distutils.core.Command)
+
+
+class Command(_Command):
+ __doc__ = _Command.__doc__
+
+ command_consumes_arguments = False
+
+ def __init__(self, dist, **kw):
+ """
+ Construct the command for dist, updating
+ vars(self) with any keyword parameters.
+ """
+ _Command.__init__(self, dist)
+ vars(self).update(kw)
+
+ def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+ cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+ vars(cmd).update(kw)
+ return cmd
+
+
+def _find_all_simple(path):
+ """
+ Find all files under 'path'
+ """
+ results = (
+ os.path.join(base, file)
+ for base, dirs, files in os.walk(path, followlinks=True)
+ for file in files
+ )
+ return filter(os.path.isfile, results)
+
+
+def findall(dir=os.curdir):
+ """
+ Find all files under 'dir' and return the list of full filenames.
+ Unless dir is '.', return full filenames with dir prepended.
+ """
+ files = _find_all_simple(dir)
+ if dir == os.curdir:
+ make_rel = functools.partial(os.path.relpath, start=dir)
+ files = map(make_rel, files)
+ return list(files)
+
+
+monkey.patch_all()
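A minimal usage sketch of the find_packages helper defined above; the package layout and names here are hypothetical:

    # Layout: src/mypkg/__init__.py, src/mypkg/sub/__init__.py, src/mypkg/docs/
    from setuptools import find_packages

    packages = find_packages(where='src', exclude=['*.tests'])
    # ['mypkg', 'mypkg.sub']: 'docs' is skipped because PackageFinder only
    # treats directories containing __init__.py as packages.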
diff --git a/setuptools/_vendor/__init__.py b/setuptools/_vendor/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/__init__.py
diff --git a/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc b/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..f86f802
--- /dev/null
+++ b/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/__pycache__/six.cpython-36.pyc b/setuptools/_vendor/__pycache__/six.cpython-36.pyc
new file mode 100644
index 0000000..afda210
--- /dev/null
+++ b/setuptools/_vendor/__pycache__/six.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__about__.py b/setuptools/_vendor/packaging/__about__.py
new file mode 100644
index 0000000..95d330e
--- /dev/null
+++ b/setuptools/_vendor/packaging/__about__.py
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+ "__title__", "__summary__", "__uri__", "__version__", "__author__",
+ "__email__", "__license__", "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "16.8"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2014-2016 %s" % __author__
diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py
new file mode 100644
index 0000000..5ee6220
--- /dev/null
+++ b/setuptools/_vendor/packaging/__init__.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+ __author__, __copyright__, __email__, __license__, __summary__, __title__,
+ __uri__, __version__
+)
+
+__all__ = [
+ "__title__", "__summary__", "__uri__", "__version__", "__author__",
+ "__email__", "__license__", "__copyright__",
+]
diff --git a/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc
new file mode 100644
index 0000000..d88ad13
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..e9a878d
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc
new file mode 100644
index 0000000..ca59089
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc
new file mode 100644
index 0000000..2640f23
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc
new file mode 100644
index 0000000..c5139c2
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc
new file mode 100644
index 0000000..d5f01d8
--- /dev/null
+++ b/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc
Binary files differ
diff --git a/setuptools/_vendor/packaging/_compat.py b/setuptools/_vendor/packaging/_compat.py
new file mode 100644
index 0000000..210bb80
--- /dev/null
+++ b/setuptools/_vendor/packaging/_compat.py
@@ -0,0 +1,30 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+ string_types = str,
+else:
+ string_types = basestring,
+
+
+def with_metaclass(meta, *bases):
+ """
+ Create a base class with a metaclass.
+ """
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
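A brief sketch of how with_metaclass is applied; BaseSpecifier in specifiers.py below uses exactly this pattern with abc.ABCMeta:

    import abc

    class Base(with_metaclass(abc.ABCMeta, object)):
        # One syntax that works on both Python 2 and 3, avoiding the
        # Python 2-only __metaclass__ attribute and the Python 3-only
        # `class Base(object, metaclass=abc.ABCMeta)` form.
        @abc.abstractmethod
        def render(self):
            """Subclasses must implement."""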
diff --git a/setuptools/_vendor/packaging/_structures.py b/setuptools/_vendor/packaging/_structures.py
new file mode 100644
index 0000000..ccc2786
--- /dev/null
+++ b/setuptools/_vendor/packaging/_structures.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+
+ def __repr__(self):
+ return "Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return False
+
+ def __le__(self, other):
+ return False
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return True
+
+ def __ge__(self, other):
+ return True
+
+ def __neg__(self):
+ return NegativeInfinity
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+
+ def __repr__(self):
+ return "-Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return True
+
+ def __le__(self, other):
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return False
+
+ def __ge__(self, other):
+ return False
+
+ def __neg__(self):
+ return Infinity
+
+NegativeInfinity = NegativeInfinity()
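Infinity and NegativeInfinity act as sentinel extremes inside comparison-key tuples; a small sketch of the behaviour the methods above guarantee:

    assert Infinity > 9999 and Infinity >= 9999
    assert NegativeInfinity < -9999
    assert -Infinity is NegativeInfinity and -NegativeInfinity is Infinity
    # version.py below imports Infinity so that absent version segments
    # (e.g. a missing pre-release) can sort consistently in a key tuple.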
diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py
new file mode 100644
index 0000000..031332a
--- /dev/null
+++ b/setuptools/_vendor/packaging/markers.py
@@ -0,0 +1,301 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import platform
+import sys
+
+from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
+from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
+from setuptools.extern.pyparsing import Literal as L # noqa
+
+from ._compat import string_types
+from .specifiers import Specifier, InvalidSpecifier
+
+
+__all__ = [
+ "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
+ "Marker", "default_environment",
+]
+
+
+class InvalidMarker(ValueError):
+ """
+ An invalid marker was found; users should refer to PEP 508.
+ """
+
+
+class UndefinedComparison(ValueError):
+ """
+ An invalid operation was attempted on a value that doesn't support it.
+ """
+
+
+class UndefinedEnvironmentName(ValueError):
+ """
+ A name was used that does not exist inside of the environment.
+ """
+
+
+class Node(object):
+
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return str(self.value)
+
+ def __repr__(self):
+ return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+
+ def serialize(self):
+ raise NotImplementedError
+
+
+class Variable(Node):
+
+ def serialize(self):
+ return str(self)
+
+
+class Value(Node):
+
+ def serialize(self):
+ return '"{0}"'.format(self)
+
+
+class Op(Node):
+
+ def serialize(self):
+ return str(self)
+
+
+VARIABLE = (
+ L("implementation_version") |
+ L("platform_python_implementation") |
+ L("implementation_name") |
+ L("python_full_version") |
+ L("platform_release") |
+ L("platform_version") |
+ L("platform_machine") |
+ L("platform_system") |
+ L("python_version") |
+ L("sys_platform") |
+ L("os_name") |
+ L("os.name") | # PEP-345
+ L("sys.platform") | # PEP-345
+ L("platform.version") | # PEP-345
+ L("platform.machine") | # PEP-345
+ L("platform.python_implementation") | # PEP-345
+ L("python_implementation") | # undocumented setuptools legacy
+ L("extra")
+)
+ALIASES = {
+ 'os.name': 'os_name',
+ 'sys.platform': 'sys_platform',
+ 'platform.version': 'platform_version',
+ 'platform.machine': 'platform_machine',
+ 'platform.python_implementation': 'platform_python_implementation',
+ 'python_implementation': 'platform_python_implementation'
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+ L("===") |
+ L("==") |
+ L(">=") |
+ L("<=") |
+ L("!=") |
+ L("~=") |
+ L(">") |
+ L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results):
+ if isinstance(results, ParseResults):
+ return [_coerce_parse_result(i) for i in results]
+ else:
+ return results
+
+
+def _format_marker(marker, first=True):
+ assert isinstance(marker, (list, tuple, string_types))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself its own list. In that case we want to skip
+ # the rest of this function so that we don't get extraneous () on the
+ # outside.
+ if (isinstance(marker, list) and len(marker) == 1 and
+ isinstance(marker[0], (list, tuple))):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": operator.lt,
+ "<=": operator.le,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.ge,
+ ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs)
+
+ oper = _operators.get(op.serialize())
+ if oper is None:
+ raise UndefinedComparison(
+ "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+ )
+
+ return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+ value = environment.get(name, _undefined)
+
+ if value is _undefined:
+ raise UndefinedEnvironmentName(
+ "{0!r} does not exist in evaluation environment.".format(name)
+ )
+
+ return value
+
+
+def _evaluate_markers(markers, environment):
+ groups = [[]]
+
+ for marker in markers:
+ assert isinstance(marker, (list, tuple, string_types))
+
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ lhs_value = _get_env(environment, lhs.value)
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ rhs_value = _get_env(environment, rhs.value)
+
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+ else:
+ assert marker in ["and", "or"]
+ if marker == "or":
+ groups.append([])
+
+ return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+ version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+ kind = info.releaselevel
+ if kind != 'final':
+ version += kind[0] + str(info.serial)
+ return version
+
+
+def default_environment():
+ if hasattr(sys, 'implementation'):
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ iver = '0'
+ implementation_name = ''
+
+ return {
+ "implementation_name": implementation_name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": platform.python_version()[:3],
+ "sys_platform": sys.platform,
+ }
+
+
+class Marker(object):
+
+ def __init__(self, marker):
+ try:
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
+ except ParseException as e:
+ err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+ marker, marker[e.loc:e.loc + 8])
+ raise InvalidMarker(err_str)
+
+ def __str__(self):
+ return _format_marker(self._markers)
+
+ def __repr__(self):
+ return "<Marker({0!r})>".format(str(self))
+
+ def evaluate(self, environment=None):
+ """Evaluate a marker.
+
+ Return the boolean from evaluating the given marker against the
+ environment. environment is an optional argument to override all or
+ part of the determined environment.
+
+ The environment is determined from the current Python process.
+ """
+ current_environment = default_environment()
+ if environment is not None:
+ current_environment.update(environment)
+
+ return _evaluate_markers(self._markers, current_environment)
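A usage sketch for the Marker class above; the marker strings are arbitrary examples:

    m = Marker("os_name == 'posix' and python_version < '3.3'")
    m.evaluate()                           # against the running interpreter
    m.evaluate({'python_version': '2.7'})  # override part of the environment
    Marker("unknown_var == 'x'")           # raises InvalidMarker at parse time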
diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py
new file mode 100644
index 0000000..5b49341
--- /dev/null
+++ b/setuptools/_vendor/packaging/requirements.py
@@ -0,0 +1,127 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import string
+import re
+
+from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
+from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
+from setuptools.extern.pyparsing import Literal as L # noqa
+from setuptools.extern.six.moves.urllib import parse as urlparse
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found, users should refer to PEP 508.
+ """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r'[^ ]+')("url")
+URL = (AT + URI)
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
+ joinString=",", adjacent=False)("_raw_spec")
+_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+ lambda s, l, t: Marker(s[t._original_start:t._original_end])
+)
+MARKER_SEPERATOR = SEMICOLON
+MARKER = MARKER_SEPERATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = \
+ NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+
+
+class Requirement(object):
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string):
+ try:
+ req = REQUIREMENT.parseString(requirement_string)
+ except ParseException as e:
+ raise InvalidRequirement(
+ "Invalid requirement, parse error at \"{0!r}\"".format(
+ requirement_string[e.loc:e.loc + 8]))
+
+ self.name = req.name
+ if req.url:
+ parsed_url = urlparse.urlparse(req.url)
+ if not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc):
+ raise InvalidRequirement("Invalid URL given")
+ self.url = req.url
+ else:
+ self.url = None
+ self.extras = set(req.extras.asList() if req.extras else [])
+ self.specifier = SpecifierSet(req.specifier)
+ self.marker = req.marker if req.marker else None
+
+ def __str__(self):
+ parts = [self.name]
+
+ if self.extras:
+ parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+ if self.specifier:
+ parts.append(str(self.specifier))
+
+ if self.url:
+ parts.append("@ {0}".format(self.url))
+
+ if self.marker:
+ parts.append("; {0}".format(self.marker))
+
+ return "".join(parts)
+
+ def __repr__(self):
+ return "<Requirement({0!r})>".format(str(self))
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000..7f5a76c
--- /dev/null
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -0,0 +1,774 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+ """
+ An invalid specifier was found; users should refer to PEP 440.
+ """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+
+ @abc.abstractmethod
+ def __str__(self):
+ """
+ Returns the str representation of this Specifier like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Returns a hash value for this Specifier like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are equal.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are not equal.
+ """
+
+ @abc.abstractproperty
+ def prereleases(self):
+ """
+ Returns whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @prereleases.setter
+ def prereleases(self, value):
+ """
+ Sets whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item, prereleases=None):
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(self, iterable, prereleases=None):
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+ _operators = {}
+
+ def __init__(self, spec="", prereleases=None):
+ match = self._regex.search(spec)
+ if not match:
+ raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+ self._spec = (
+ match.group("operator").strip(),
+ match.group("version").strip(),
+ )
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<{0}({1!r}{2})>".format(
+ self.__class__.__name__,
+ str(self),
+ pre,
+ )
+
+ def __str__(self):
+ return "{0}{1}".format(*self._spec)
+
+ def __hash__(self):
+ return hash(self._spec)
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec == other._spec
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec != other._spec
+
+ def _get_operator(self, op):
+ return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+ def _coerce_version(self, version):
+ if not isinstance(version, (LegacyVersion, Version)):
+ version = parse(version)
+ return version
+
+ @property
+ def operator(self):
+ return self._spec[0]
+
+ @property
+ def version(self):
+ return self._spec[1]
+
+ @property
+ def prereleases(self):
+ return self._prereleases
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Determine if prereleases are to be allowed or not.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # Normalize item to a Version or LegacyVersion, this allows us to have
+ # a shortcut for ``"2.0" in Specifier(">=2")``
+ item = self._coerce_version(item)
+
+ # Determine if we should be supporting prereleases in this specifier
+ # or not; if we do not support prereleases then we can short-circuit
+ # if this version is a prerelease.
+ if item.is_prerelease and not prereleases:
+ return False
+
+ # Actually do the comparison to determine if this item is contained
+ # within this Specifier or not.
+ return self._get_operator(self.operator)(item, self.version)
+
+ def filter(self, iterable, prereleases=None):
+ yielded = False
+ found_prereleases = []
+
+ kw = {"prereleases": prereleases if prereleases is not None else True}
+
+ # Attempt to iterate over all the values in the iterable and if any of
+ # them match, yield them.
+ for version in iterable:
+ parsed_version = self._coerce_version(version)
+
+ if self.contains(parsed_version, **kw):
+ # If our version is a prerelease, and we were not set to allow
+ # prereleases, then we'll store it for later in case nothing
+ # else matches this specifier.
+ if (parsed_version.is_prerelease and not
+ (prereleases or self.prereleases)):
+ found_prereleases.append(version)
+ # Either this is not a prerelease, or we should have been
+ # accepting prereleases from the beginning.
+ else:
+ yielded = True
+ yield version
+
+ # Now that we've iterated over everything, determine if we've yielded
+ # any values, and if we have not and we have any prereleases stored up
+ # then we will go ahead and yield the prereleases.
+ if not yielded and found_prereleases:
+ for version in found_prereleases:
+ yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+ _regex_str = (
+ r"""
+ (?P<operator>(==|!=|<=|>=|<|>))
+ \s*
+ (?P<version>
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
+ # string can be just about anything, we match everything
+ # except for whitespace, a semi-colon for marker support,
+ # a closing paren since versions can be enclosed in
+ # them, and a comma since it's a version separator.
+ )
+ """
+ )
+
+ _regex = re.compile(
+ r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ }
+
+ def _coerce_version(self, version):
+ if not isinstance(version, LegacyVersion):
+ version = LegacyVersion(str(version))
+ return version
+
+ def _compare_equal(self, prospective, spec):
+ return prospective == self._coerce_version(spec)
+
+ def _compare_not_equal(self, prospective, spec):
+ return prospective != self._coerce_version(spec)
+
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= self._coerce_version(spec)
+
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= self._coerce_version(spec)
+
+ def _compare_less_than(self, prospective, spec):
+ return prospective < self._coerce_version(spec)
+
+ def _compare_greater_than(self, prospective, spec):
+ return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+ @functools.wraps(fn)
+ def wrapped(self, prospective, spec):
+ if not isinstance(prospective, Version):
+ return False
+ return fn(self, prospective, spec)
+ return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+ _regex_str = (
+ r"""
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+ (?P<version>
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s]* # We just match everything, except for whitespace
+ # since we are only testing for strict identity.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+
+ # You cannot use a wild card and a dev or local version
+ # together so group them with a | and make them optional.
+ (?:
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ |
+ \.\* # Wild card syntax of .*
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a subset of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?<!==|!=|~=) # We have special cases for these
+ # operators so we want to make sure they
+ # don't match here.
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ )
+ """
+ )
+
+ _regex = re.compile(
+ r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "~=": "compatible",
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ @_require_version_compare
+ def _compare_compatible(self, prospective, spec):
+ # Compatible releases have an equivalent combination of >= and ==. That
+ # is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+ # implement this in terms of the other specifiers instead of
+ # implementing it ourselves. The only thing we need to do is construct
+ # the other specifiers.
+
+ # We want everything but the last item in the version, but we want to
+ # ignore post and dev releases and we want to treat the pre-release as
+ # its own separate segment.
+ prefix = ".".join(
+ list(
+ itertools.takewhile(
+ lambda x: (not x.startswith("post") and not
+ x.startswith("dev")),
+ _version_split(spec),
+ )
+ )[:-1]
+ )
+
+ # Add the prefix notation to the end of our string
+ prefix += ".*"
+
+ return (self._get_operator(">=")(prospective, spec) and
+ self._get_operator("==")(prospective, prefix))
+
+ @_require_version_compare
+ def _compare_equal(self, prospective, spec):
+ # We need special logic to handle prefix matching
+ if spec.endswith(".*"):
+ # In the case of prefix matching we want to ignore local segment.
+ prospective = Version(prospective.public)
+ # Split the spec out by dots, and pretend that there is an implicit
+ # dot in between a release segment and a pre-release segment.
+ spec = _version_split(spec[:-2]) # Remove the trailing .*
+
+ # Split the prospective version out by dots, and pretend that there
+ # is an implicit dot in between a release segment and a pre-release
+ # segment.
+ prospective = _version_split(str(prospective))
+
+ # Shorten the prospective version to be the same length as the spec
+ # so that we can determine if the specifier is a prefix of the
+ # prospective version or not.
+ prospective = prospective[:len(spec)]
+
+ # Pad out our two sides with zeros so that they both equal the same
+ # length.
+ spec, prospective = _pad_version(spec, prospective)
+ else:
+ # Convert our spec string into a Version
+ spec = Version(spec)
+
+ # If the specifier does not have a local segment, then we want to
+ # act as if the prospective version also does not have a local
+ # segment.
+ if not spec.local:
+ prospective = Version(prospective.public)
+
+ return prospective == spec
+
+ @_require_version_compare
+ def _compare_not_equal(self, prospective, spec):
+ return not self._compare_equal(prospective, spec)
+
+ @_require_version_compare
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= Version(spec)
+
+ @_require_version_compare
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= Version(spec)
+
+ @_require_version_compare
+ def _compare_less_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is less than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective < spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes a pre-release version, we do not accept pre-release
+ # versions for the version mentioned in the specifier (e.g. <3.1 should
+ # not match 3.1.dev0, but should match 3.0.dev0).
+ if not spec.is_prerelease and prospective.is_prerelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # less than the spec version *and* it's not a pre-release of the same
+ # version in the spec.
+ return True
+
+ @_require_version_compare
+ def _compare_greater_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is greater than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective > spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes a post-release version, we do not accept
+ # post-release versions for the version mentioned in the specifier
+ # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+ if not spec.is_postrelease and prospective.is_postrelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # Ensure that we do not allow a local version of the version mentioned
+ # in the specifier, which is technically greater than, to match.
+ if prospective.local is not None:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # greater than the spec version *and* it's not a pre-release of the
+ # same version in the spec.
+ return True
+
+ def _compare_arbitrary(self, prospective, spec):
+ return str(prospective).lower() == str(spec).lower()
+
+ @property
+ def prereleases(self):
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # Look at all of our specifiers and determine if they are inclusive
+ # operators, and if they are if they are including an explicit
+ # prerelease.
+ operator, version = self._spec
+ if operator in ["==", ">=", "<=", "~=", "==="]:
+ # The == specifier can include a trailing .*, if it does we
+ # want to remove before parsing.
+ if operator == "==" and version.endswith(".*"):
+ version = version[:-2]
+
+ # Parse the version, and if it is a pre-release than this
+ # specifier allows pre-releases.
+ if parse(version).is_prerelease:
+ return True
+
+ return False
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+ result = []
+ for item in version.split("."):
+ match = _prefix_regex.search(item)
+ if match:
+ result.extend(match.groups())
+ else:
+ result.append(item)
+ return result
+
+
+def _pad_version(left, right):
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]):])
+ right_split.append(right[len(right_split[0]):])
+
+ # Insert our padding
+ left_split.insert(
+ 1,
+ ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
+ )
+ right_split.insert(
+ 1,
+ ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
+ )
+
+ return (
+ list(itertools.chain(*left_split)),
+ list(itertools.chain(*right_split)),
+ )
+
+
+class SpecifierSet(BaseSpecifier):
+
+ def __init__(self, specifiers="", prereleases=None):
+ # Split on , to break each individual specifier into its own item, and
+ # strip each item to remove leading/trailing whitespace.
+ specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+ # Parse each individual specifier, attempting first to make it a
+ # Specifier and falling back to a LegacySpecifier.
+ parsed = set()
+ for specifier in specifiers:
+ try:
+ parsed.add(Specifier(specifier))
+ except InvalidSpecifier:
+ parsed.add(LegacySpecifier(specifier))
+
+ # Turn our parsed specifiers into a frozen set and save them for later.
+ self._specs = frozenset(parsed)
+
+ # Store our prereleases value so we can use it later to determine if
+ # we accept prereleases or not.
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+ def __str__(self):
+ return ",".join(sorted(str(s) for s in self._specs))
+
+ def __hash__(self):
+ return hash(self._specs)
+
+ def __and__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ specifier = SpecifierSet()
+ specifier._specs = frozenset(self._specs | other._specs)
+
+ if self._prereleases is None and other._prereleases is not None:
+ specifier._prereleases = other._prereleases
+ elif self._prereleases is not None and other._prereleases is None:
+ specifier._prereleases = self._prereleases
+ elif self._prereleases == other._prereleases:
+ specifier._prereleases = self._prereleases
+ else:
+ raise ValueError(
+ "Cannot combine SpecifierSets with True and False prerelease "
+ "overrides."
+ )
+
+ return specifier
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs == other._specs
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs != other._specs
+
+ def __len__(self):
+ return len(self._specs)
+
+ def __iter__(self):
+ return iter(self._specs)
+
+ @property
+ def prereleases(self):
+ # If we have been given an explicit prerelease modifier, then we'll
+ # pass that through here.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # If we don't have any specifiers, and we don't have a forced value,
+ # then we'll just return None since we don't know if this should have
+ # pre-releases or not.
+ if not self._specs:
+ return None
+
+ # Otherwise we'll see if any of the given specifiers accept
+ # prereleases, if any of them do we'll return True, otherwise False.
+ return any(s.prereleases for s in self._specs)
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Ensure that our item is a Version or LegacyVersion instance.
+ if not isinstance(item, (LegacyVersion, Version)):
+ item = parse(item)
+
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # We can determine if we're going to allow pre-releases by looking to
+ # see if any of the underlying items supports them. If none of them do
+ # and this item is a pre-release then we do not allow it and we can
+ # short circuit that here.
+ # Note: This means that 1.0.dev1 would not be contained in something
+ # like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
+ if not prereleases and item.is_prerelease:
+ return False
+
+ # We simply dispatch to the underlying specs here to make sure that the
+ # given version is contained within all of them.
+ # Note: This use of all() here means that an empty set of specifiers
+ # will always return True, this is an explicit design decision.
+ return all(
+ s.contains(item, prereleases=prereleases)
+ for s in self._specs
+ )
+
+ def filter(self, iterable, prereleases=None):
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # If we have any specifiers, then we want to wrap our iterable in the
+ # filter method for each one; this will act as a logical AND amongst
+ # each specifier.
+ if self._specs:
+ for spec in self._specs:
+ iterable = spec.filter(iterable, prereleases=bool(prereleases))
+ return iterable
+ # If we do not have any specifiers, then we need to have a rough filter
+ # which will filter out any pre-releases, unless there are no final
+ # releases, and which will filter out LegacyVersion in general.
+ else:
+ filtered = []
+ found_prereleases = []
+
+ for item in iterable:
+ # Ensure that we have some kind of Version class for this item.
+ if not isinstance(item, (LegacyVersion, Version)):
+ parsed_version = parse(item)
+ else:
+ parsed_version = item
+
+ # Filter out any item which is parsed as a LegacyVersion
+ if isinstance(parsed_version, LegacyVersion):
+ continue
+
+ # Store any item which is a pre-release for later unless we've
+ # already found a final version or we are accepting prereleases
+ if parsed_version.is_prerelease and not prereleases:
+ if not filtered:
+ found_prereleases.append(item)
+ else:
+ filtered.append(item)
+
+ # If we've found no items except for pre-releases, then we'll go
+ # ahead and use the pre-releases
+ if not filtered and found_prereleases and prereleases is None:
+ return found_prereleases
+
+ return filtered
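A sketch of SpecifierSet containment and filtering as implemented above; the version strings are arbitrary:

    spec = SpecifierSet(">=1.0,!=1.3")
    "1.4" in spec   # True, dispatched through contains()
    "1.3" in spec   # False
    list(spec.filter(["0.9", "1.3", "1.4", "2.0.dev1"]))
    # ['1.4']: the dev release is dropped because no member of this set
    # opts in to pre-releases and prereleases was not forced to True.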
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
new file mode 100644
index 0000000..942387c
--- /dev/null
+++ b/setuptools/_vendor/packaging/utils.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+ # This is taken from PEP 503.
+ return _canonicalize_regex.sub("-", name).lower()
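A quick sketch of the PEP 503 normalization above:

    canonicalize_name("Django_REST.framework")  # 'django-rest-framework'
    # Runs of '.', '-' and '_' collapse to a single '-' and the result is
    # lowercased, so differently spelled project names compare equal.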
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
new file mode 100644
index 0000000..83b5ee8
--- /dev/null
+++ b/setuptools/_vendor/packaging/version.py
@@ -0,0 +1,393 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+ "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+ "_Version",
+ ["epoch", "release", "dev", "pre", "post", "local"],
+)
+
+
+def parse(version):
+ """
+ Parse the given version string and return either a :class:`Version` object
+ or a :class:`LegacyVersion` object depending on if the given version is
+ a valid PEP 440 version or a legacy version.
+ """
+ try:
+ return Version(version)
+ except InvalidVersion:
+ return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+ """
+ An invalid version was found; users should refer to PEP 440.
+ """
+
+
+class _BaseVersion(object):
+
+ def __hash__(self):
+ return hash(self._key)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s, o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s, o: s <= o)
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s, o: s == o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s, o: s >= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s, o: s > o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s, o: s != o)
+
+ def _compare(self, other, method):
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+
+ def __init__(self, version):
+ self._version = str(version)
+ self._key = _legacy_cmpkey(self._version)
+
+ def __str__(self):
+ return self._version
+
+ def __repr__(self):
+ return "<LegacyVersion({0})>".format(repr(str(self)))
+
+ @property
+ def public(self):
+ return self._version
+
+ @property
+ def base_version(self):
+ return self._version
+
+ @property
+ def local(self):
+ return None
+
+ @property
+ def is_prerelease(self):
+ return False
+
+ @property
+ def is_postrelease(self):
+ return False
+
+
+_legacy_version_component_re = re.compile(
+ r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
+)
+
+_legacy_version_replacement_map = {
+ "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+ for part in _legacy_version_component_re.split(s):
+ part = _legacy_version_replacement_map.get(part, part)
+
+ if not part or part == ".":
+ continue
+
+ if part[:1] in "0123456789":
+ # pad for numeric comparison
+ yield part.zfill(8)
+ else:
+ yield "*" + part
+
+ # ensure that alpha/beta/candidate are before final
+ yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+    # greater than or equal to 0. This effectively sorts every LegacyVersion,
+    # which uses the de facto standard originally implemented by setuptools,
+    # before all PEP 440 versions.
+ epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version in setuptools,
+    # prior to its adoption of the packaging library.
+ parts = []
+ for part in _parse_version_parts(version.lower()):
+ if part.startswith("*"):
+ # remove "-" before a prerelease tag
+ if part < "*final":
+ while parts and parts[-1] == "*final-":
+ parts.pop()
+
+ # remove trailing zeros from each series of numeric parts
+ while parts and parts[-1] == "00000000":
+ parts.pop()
+
+ parts.append(part)
+ parts = tuple(parts)
+
+ return epoch, parts
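+
+# As a consequence of the -1 epoch, any LegacyVersion sorts before any
+# PEP 440 Version (illustrative sketch):
+#
+#     >>> parse("not.a.pep440.version") < parse("0.0.1")
+#     True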
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+
+class Version(_BaseVersion):
+
+ _regex = re.compile(
+ r"^\s*" + VERSION_PATTERN + r"\s*$",
+ re.VERBOSE | re.IGNORECASE,
+ )
+
+ def __init__(self, version):
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(
+ match.group("pre_l"),
+ match.group("pre_n"),
+ ),
+ post=_parse_letter_version(
+ match.group("post_l"),
+ match.group("post_n1") or match.group("post_n2"),
+ ),
+ dev=_parse_letter_version(
+ match.group("dev_l"),
+ match.group("dev_n"),
+ ),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self):
+ return "<Version({0})>".format(repr(str(self)))
+
+ def __str__(self):
+ parts = []
+
+ # Epoch
+ if self._version.epoch != 0:
+ parts.append("{0}!".format(self._version.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self._version.release))
+
+ # Pre-release
+ if self._version.pre is not None:
+ parts.append("".join(str(x) for x in self._version.pre))
+
+ # Post-release
+ if self._version.post is not None:
+ parts.append(".post{0}".format(self._version.post[1]))
+
+ # Development release
+ if self._version.dev is not None:
+ parts.append(".dev{0}".format(self._version.dev[1]))
+
+ # Local version segment
+ if self._version.local is not None:
+ parts.append(
+ "+{0}".format(".".join(str(x) for x in self._version.local))
+ )
+
+ return "".join(parts)
+
+ @property
+ def public(self):
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self):
+ parts = []
+
+ # Epoch
+ if self._version.epoch != 0:
+ parts.append("{0}!".format(self._version.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self._version.release))
+
+ return "".join(parts)
+
+ @property
+ def local(self):
+ version_string = str(self)
+ if "+" in version_string:
+ return version_string.split("+", 1)[1]
+
+ @property
+ def is_prerelease(self):
+ return bool(self._version.dev or self._version.pre)
+
+ @property
+ def is_postrelease(self):
+ return bool(self._version.post)
+
+
+def _parse_letter_version(letter, number):
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
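+
+# A couple of illustrative results (sketch):
+#
+#     >>> _parse_letter_version("alpha", None)   # implicit 0, normalized spelling
+#     ('a', 0)
+#     >>> _parse_letter_version(None, "2")       # implicit post release, e.g. "1.0-2"
+#     ('post', 2)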
+
+
+_local_version_seperators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_seperators.split(local)
+ )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order, make it a tuple, and use that as our
+    # sorting key.
+ release = tuple(
+ reversed(list(
+ itertools.dropwhile(
+ lambda x: x == 0,
+ reversed(release),
+ )
+ ))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ pre = -Infinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ pre = Infinity
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ post = -Infinity
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ dev = Infinity
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ local = -Infinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ local = tuple(
+ (i, "") if isinstance(i, int) else (-Infinity, i)
+ for i in local
+ )
+
+ return epoch, release, pre, post, dev, local
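+
+# The net ordering these keys produce (illustrative sketch):
+#
+#     >>> Version("1.0.dev0") < Version("1.0a0") < Version("1.0") < Version("1.0.post0")
+#     True
+#     >>> Version("1.0") < Version("1.0+anything")
+#     True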
diff --git a/setuptools/_vendor/pyparsing.py b/setuptools/_vendor/pyparsing.py
new file mode 100644
index 0000000..a212243
--- /dev/null
+++ b/setuptools/_vendor/pyparsing.py
@@ -0,0 +1,5696 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2016 Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
+(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+ from pyparsing import Word, alphas
+
+ # define grammar of a greeting
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+ Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
+ - quoted strings
+ - embedded comments
+"""
+
+__version__ = "2.1.10"
+__versionTime__ = "07 Oct 2016 01:31 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+ from _thread import RLock
+except ImportError:
+ from threading import RLock
+
+try:
+ from collections import OrderedDict as _OrderedDict
+except ImportError:
+ try:
+ from ordereddict import OrderedDict as _OrderedDict
+ except ImportError:
+ _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+ _MAX_INT = sys.maxsize
+ basestring = str
+ unichr = chr
+ _ustr = str
+
+ # build list of single arg builtins, that can be used as parse actions
+ singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+ _MAX_INT = sys.maxint
+ range = xrange
+
+ def _ustr(obj):
+ """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+ str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+ then < returns the unicode object | encodes it with the default encoding | ... >.
+ """
+ if isinstance(obj,unicode):
+ return obj
+
+ try:
+ # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+ # it won't break any existing code.
+ return str(obj)
+
+ except UnicodeEncodeError:
+ # Else encode it
+ ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+ xmlcharref = Regex('&#\d+;')
+ xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+ return xmlcharref.transformString(ret)
+
+ # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+ singleArgBuiltins = []
+ import __builtin__
+ for fname in "sum len sorted reversed list tuple set any all min max".split():
+ try:
+ singleArgBuiltins.append(getattr(__builtin__,fname))
+ except AttributeError:
+ continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+ """Escape &, <, >, ", ', etc. in a string of data."""
+
+ # ampersand must be replaced first
+ from_symbols = '&><"\''
+ to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+ for from_,to_ in zip(from_symbols, to_symbols):
+ data = data.replace(from_, to_)
+ return data
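+
+# e.g. (sketch): _xml_escape('a<b & "c"') -> 'a&lt;b &amp; &quot;c&quot;'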
+
+class _Constants(object):
+ pass
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+ """base exception class for all parsing runtime exceptions"""
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, pstr, loc=0, msg=None, elem=None ):
+ self.loc = loc
+ if msg is None:
+ self.msg = pstr
+ self.pstr = ""
+ else:
+ self.msg = msg
+ self.pstr = pstr
+ self.parserElement = elem
+ self.args = (pstr, loc, msg)
+
+ @classmethod
+ def _from_exception(cls, pe):
+ """
+ internal factory method to simplify creating one type of ParseException
+ from another - avoids having __init__ signature conflicts among subclasses
+ """
+ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+ def __getattr__( self, aname ):
+ """supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+ """
+ if( aname == "lineno" ):
+ return lineno( self.loc, self.pstr )
+ elif( aname in ("col", "column") ):
+ return col( self.loc, self.pstr )
+ elif( aname == "line" ):
+ return line( self.loc, self.pstr )
+ else:
+ raise AttributeError(aname)
+
+ def __str__( self ):
+ return "%s (at char %d), (line:%d, col:%d)" % \
+ ( self.msg, self.loc, self.lineno, self.column )
+ def __repr__( self ):
+ return _ustr(self)
+ def markInputline( self, markerString = ">!<" ):
+ """Extracts the exception line from the input string, and marks
+ the location of the exception with a special symbol.
+ """
+ line_str = self.line
+ line_column = self.column - 1
+ if markerString:
+ line_str = "".join((line_str[:line_column],
+ markerString, line_str[line_column:]))
+ return line_str.strip()
+ def __dir__(self):
+ return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+ """
+    Exception thrown when parse expressions don't match the input string;
+ supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+
+ Example::
+ try:
+ Word(nums).setName("integer").parseString("ABC")
+ except ParseException as pe:
+ print(pe)
+ print("column: {}".format(pe.col))
+
+ prints::
+ Expected integer (at char 0), (line:1, col:1)
+ column: 1
+ """
+ pass
+
+class ParseFatalException(ParseBaseException):
+ """user-throwable exception thrown when inconsistent parse content
+ is found; stops all parsing immediately"""
+ pass
+
+class ParseSyntaxException(ParseFatalException):
+ """just like L{ParseFatalException}, but thrown internally when an
+ L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop
+ immediately because an unbacktrackable syntax error has been found"""
+ pass
+
+#~ class ReparseException(ParseBaseException):
+ #~ """Experimental class - parse actions can raise this exception to cause
+ #~ pyparsing to reparse the input string:
+ #~ - with a modified input string, and/or
+ #~ - with a modified start location
+ #~ Set the values of the ReparseException in the constructor, and raise the
+ #~ exception in a parse action to cause pyparsing to use the new string/location.
+ #~ Setting the values as None causes no change to be made.
+ #~ """
+ #~ def __init_( self, newstring, restartLoc ):
+ #~ self.newParseText = newstring
+ #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+ """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+ def __init__( self, parseElementList ):
+ self.parseElementTrace = parseElementList
+
+ def __str__( self ):
+ return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+ def __init__(self,p1,p2):
+ self.tup = (p1,p2)
+ def __getitem__(self,i):
+ return self.tup[i]
+ def __repr__(self):
+ return repr(self.tup[0])
+ def setOffset(self,i):
+ self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+ """
+ Structured parse results, to provide multiple means of access to the parsed data:
+ - as a list (C{len(results)})
+ - by list index (C{results[0], results[1]}, etc.)
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+ Example::
+ integer = Word(nums)
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+ # equivalent form:
+ # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ # parseString returns a ParseResults object
+ result = date_str.parseString("1999/12/31")
+
+ def test(s, fn=repr):
+ print("%s -> %s" % (s, fn(eval(s))))
+ test("list(result)")
+ test("result[0]")
+ test("result['month']")
+ test("result.day")
+ test("'month' in result")
+ test("'minutes' in result")
+ test("result.dump()", str)
+ prints::
+ list(result) -> ['1999', '/', '12', '/', '31']
+ result[0] -> '1999'
+ result['month'] -> '12'
+ result.day -> '31'
+ 'month' in result -> True
+ 'minutes' in result -> False
+ result.dump() -> ['1999', '/', '12', '/', '31']
+ - day: 31
+ - month: 12
+ - year: 1999
+ """
+ def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+ if isinstance(toklist, cls):
+ return toklist
+ retobj = object.__new__(cls)
+ retobj.__doinit = True
+ return retobj
+
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+ if self.__doinit:
+ self.__doinit = False
+ self.__name = None
+ self.__parent = None
+ self.__accumNames = {}
+ self.__asList = asList
+ self.__modal = modal
+ if toklist is None:
+ toklist = []
+ if isinstance(toklist, list):
+ self.__toklist = toklist[:]
+ elif isinstance(toklist, _generatorType):
+ self.__toklist = list(toklist)
+ else:
+ self.__toklist = [toklist]
+ self.__tokdict = dict()
+
+ if name is not None and name:
+ if not modal:
+ self.__accumNames[name] = 0
+ if isinstance(name,int):
+ name = _ustr(name) # will always return a str, but use _ustr for consistency
+ self.__name = name
+ if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+ if isinstance(toklist,basestring):
+ toklist = [ toklist ]
+ if asList:
+ if isinstance(toklist,ParseResults):
+ self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+ else:
+ self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+ self[name].__name = name
+ else:
+ try:
+ self[name] = toklist[0]
+ except (KeyError,TypeError,IndexError):
+ self[name] = toklist
+
+ def __getitem__( self, i ):
+ if isinstance( i, (int,slice) ):
+ return self.__toklist[i]
+ else:
+ if i not in self.__accumNames:
+ return self.__tokdict[i][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+ def __setitem__( self, k, v, isinstance=isinstance ):
+ if isinstance(v,_ParseResultsWithOffset):
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+ sub = v[0]
+ elif isinstance(k,(int,slice)):
+ self.__toklist[k] = v
+ sub = v
+ else:
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+ sub = v
+ if isinstance(sub,ParseResults):
+ sub.__parent = wkref(self)
+
+ def __delitem__( self, i ):
+ if isinstance(i,(int,slice)):
+ mylen = len( self.__toklist )
+ del self.__toklist[i]
+
+ # convert int to slice
+ if isinstance(i, int):
+ if i < 0:
+ i += mylen
+ i = slice(i, i+1)
+ # get removed indices
+ removed = list(range(*i.indices(mylen)))
+ removed.reverse()
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for j in removed:
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+ else:
+ del self.__tokdict[i]
+
+ def __contains__( self, k ):
+ return k in self.__tokdict
+
+ def __len__( self ): return len( self.__toklist )
+ def __bool__(self): return ( not not self.__toklist )
+ __nonzero__ = __bool__
+ def __iter__( self ): return iter( self.__toklist )
+ def __reversed__( self ): return iter( self.__toklist[::-1] )
+ def _iterkeys( self ):
+ if hasattr(self.__tokdict, "iterkeys"):
+ return self.__tokdict.iterkeys()
+ else:
+ return iter(self.__tokdict)
+
+ def _itervalues( self ):
+ return (self[k] for k in self._iterkeys())
+
+ def _iteritems( self ):
+ return ((k, self[k]) for k in self._iterkeys())
+
+ if PY_3:
+ keys = _iterkeys
+ """Returns an iterator of all named result keys (Python 3.x only)."""
+
+ values = _itervalues
+ """Returns an iterator of all named result values (Python 3.x only)."""
+
+ items = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+ else:
+ iterkeys = _iterkeys
+ """Returns an iterator of all named result keys (Python 2.x only)."""
+
+ itervalues = _itervalues
+ """Returns an iterator of all named result values (Python 2.x only)."""
+
+ iteritems = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+ def keys( self ):
+ """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iterkeys())
+
+ def values( self ):
+ """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.itervalues())
+
+ def items( self ):
+ """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iteritems())
+
+ def haskeys( self ):
+ """Since keys() returns an iterator, this method is helpful in bypassing
+ code that looks for the existence of any defined results names."""
+ return bool(self.__tokdict)
+
+ def pop( self, *args, **kwargs):
+ """
+ Removes and returns item at specified index (default=C{last}).
+ Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
+ argument or an integer argument, it will use C{list} semantics
+ and pop tokens from the list of parsed tokens. If passed a
+ non-integer argument (most likely a string), it will use C{dict}
+ semantics and pop the corresponding value from any defined
+ results names. A second default return value argument is
+ supported, just as in C{dict.pop()}.
+
+ Example::
+ def remove_first(tokens):
+ tokens.pop(0)
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+ print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+ label = Word(alphas)
+ patt = label("LABEL") + OneOrMore(Word(nums))
+ print(patt.parseString("AAB 123 321").dump())
+
+ # Use pop() in a parse action to remove named result (note that corresponding value is not
+ # removed from list form of results)
+ def remove_LABEL(tokens):
+ tokens.pop("LABEL")
+ return tokens
+ patt.addParseAction(remove_LABEL)
+ print(patt.parseString("AAB 123 321").dump())
+ prints::
+ ['AAB', '123', '321']
+ - LABEL: AAB
+
+ ['AAB', '123', '321']
+ """
+ if not args:
+ args = [-1]
+ for k,v in kwargs.items():
+ if k == 'default':
+ args = (args[0], v)
+ else:
+ raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+ if (isinstance(args[0], int) or
+ len(args) == 1 or
+ args[0] in self):
+ index = args[0]
+ ret = self[index]
+ del self[index]
+ return ret
+ else:
+ defaultvalue = args[1]
+ return defaultvalue
+
+ def get(self, key, defaultValue=None):
+ """
+ Returns named result matching the given key, or if there is no
+ such name, then returns the given C{defaultValue} or C{None} if no
+ C{defaultValue} is specified.
+
+ Similar to C{dict.get()}.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString("1999/12/31")
+ print(result.get("year")) # -> '1999'
+ print(result.get("hour", "not specified")) # -> 'not specified'
+ print(result.get("hour")) # -> None
+ """
+ if key in self:
+ return self[key]
+ else:
+ return defaultValue
+
+ def insert( self, index, insStr ):
+ """
+ Inserts new element at location index in the list of parsed tokens.
+
+ Similar to C{list.insert()}.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to insert the parse location in the front of the parsed results
+ def insert_locn(locn, tokens):
+ tokens.insert(0, locn)
+ print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+ """
+ self.__toklist.insert(index, insStr)
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+ def append( self, item ):
+ """
+ Add single element to end of ParseResults list of elements.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to compute the sum of the parsed integers, and add it to the end
+ def append_sum(tokens):
+ tokens.append(sum(map(int, tokens)))
+ print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+ """
+ self.__toklist.append(item)
+
+ def extend( self, itemseq ):
+ """
+ Add sequence of elements to end of ParseResults list of elements.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+
+ # use a parse action to append the reverse of the matched strings, to make a palindrome
+ def make_palindrome(tokens):
+ tokens.extend(reversed([t[::-1] for t in tokens]))
+ return ''.join(tokens)
+ print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+ """
+ if isinstance(itemseq, ParseResults):
+ self += itemseq
+ else:
+ self.__toklist.extend(itemseq)
+
+ def clear( self ):
+ """
+ Clear all elements and results names.
+ """
+ del self.__toklist[:]
+ self.__tokdict.clear()
+
+    def __getattr__( self, name ):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+ def __add__( self, other ):
+ ret = self.copy()
+ ret += other
+ return ret
+
+ def __iadd__( self, other ):
+ if other.__tokdict:
+ offset = len(self.__toklist)
+ addoffset = lambda a: offset if a<0 else a+offset
+ otheritems = other.__tokdict.items()
+ otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+ for (k,vlist) in otheritems for v in vlist]
+ for k,v in otherdictitems:
+ self[k] = v
+ if isinstance(v[0],ParseResults):
+ v[0].__parent = wkref(self)
+
+ self.__toklist += other.__toklist
+ self.__accumNames.update( other.__accumNames )
+ return self
+
+ def __radd__(self, other):
+ if isinstance(other,int) and other == 0:
+ # useful for merging many ParseResults using sum() builtin
+ return self.copy()
+ else:
+ # this may raise a TypeError - so be it
+ return other + self
+
+ def __repr__( self ):
+ return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+ def __str__( self ):
+ return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+ def _asStringList( self, sep='' ):
+ out = []
+ for item in self.__toklist:
+ if out and sep:
+ out.append(sep)
+ if isinstance( item, ParseResults ):
+ out += item._asStringList()
+ else:
+ out.append( _ustr(item) )
+ return out
+
+ def asList( self ):
+ """
+ Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+ result = patt.parseString("sldkj lsdkj sldkj")
+ # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+ print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+ # Use asList() to create an actual list
+ result_list = result.asList()
+ print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+ """
+ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+ def asDict( self ):
+ """
+ Returns the named parse results as a nested dictionary.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+ result_dict = result.asDict()
+ print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometimes you just need to have a dict
+ import json
+ print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+ print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+ """
+ if PY_3:
+ item_fn = self.items
+ else:
+ item_fn = self.iteritems
+
+ def toItem(obj):
+ if isinstance(obj, ParseResults):
+ if obj.haskeys():
+ return obj.asDict()
+ else:
+ return [toItem(v) for v in obj]
+ else:
+ return obj
+
+ return dict((k,toItem(v)) for k,v in item_fn())
+
+ def copy( self ):
+ """
+ Returns a new copy of a C{ParseResults} object.
+ """
+ ret = ParseResults( self.__toklist )
+ ret.__tokdict = self.__tokdict.copy()
+ ret.__parent = self.__parent
+ ret.__accumNames.update( self.__accumNames )
+ ret.__name = self.__name
+ return ret
+
+ def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+ """
+ (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+ """
+ nl = "\n"
+ out = []
+ namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+ for v in vlist)
+ nextLevelIndent = indent + " "
+
+ # collapse out indents if formatting is not desired
+ if not formatted:
+ indent = ""
+ nextLevelIndent = ""
+ nl = ""
+
+ selfTag = None
+ if doctag is not None:
+ selfTag = doctag
+ else:
+ if self.__name:
+ selfTag = self.__name
+
+ if not selfTag:
+ if namedItemsOnly:
+ return ""
+ else:
+ selfTag = "ITEM"
+
+ out += [ nl, indent, "<", selfTag, ">" ]
+
+ for i,res in enumerate(self.__toklist):
+ if isinstance(res,ParseResults):
+ if i in namedItems:
+ out += [ res.asXML(namedItems[i],
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ out += [ res.asXML(None,
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ # individual token, see if there is a name for it
+ resTag = None
+ if i in namedItems:
+ resTag = namedItems[i]
+ if not resTag:
+ if namedItemsOnly:
+ continue
+ else:
+ resTag = "ITEM"
+ xmlBodyText = _xml_escape(_ustr(res))
+ out += [ nl, nextLevelIndent, "<", resTag, ">",
+ xmlBodyText,
+ "</", resTag, ">" ]
+
+ out += [ nl, indent, "</", selfTag, ">" ]
+ return "".join(out)
+
+ def __lookup(self,sub):
+ for k,vlist in self.__tokdict.items():
+ for v,loc in vlist:
+ if sub is v:
+ return k
+ return None
+
+ def getName(self):
+ """
+ Returns the results name for this token expression. Useful when several
+ different expressions might match at a particular location.
+
+ Example::
+ integer = Word(nums)
+ ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+ house_number_expr = Suppress('#') + Word(nums, alphanums)
+ user_data = (Group(house_number_expr)("house_number")
+ | Group(ssn_expr)("ssn")
+ | Group(integer)("age"))
+ user_info = OneOrMore(user_data)
+
+ result = user_info.parseString("22 111-22-3333 #221B")
+ for item in result:
+ print(item.getName(), ':', item[0])
+ prints::
+ age : 22
+ ssn : 111-22-3333
+ house_number : 221B
+ """
+ if self.__name:
+ return self.__name
+ elif self.__parent:
+ par = self.__parent()
+ if par:
+ return par.__lookup(self)
+ else:
+ return None
+ elif (len(self) == 1 and
+ len(self.__tokdict) == 1 and
+ next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+ return next(iter(self.__tokdict.keys()))
+ else:
+ return None
+
+ def dump(self, indent='', depth=0, full=True):
+ """
+ Diagnostic method for listing out the contents of a C{ParseResults}.
+ Accepts an optional C{indent} argument so that this string can be embedded
+ in a nested display of other data.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(result.dump())
+ prints::
+ ['12', '/', '31', '/', '1999']
+ - day: 1999
+ - month: 31
+ - year: 12
+ """
+ out = []
+ NL = '\n'
+ out.append( indent+_ustr(self.asList()) )
+ if full:
+ if self.haskeys():
+ items = sorted((str(k), v) for k,v in self.items())
+ for k,v in items:
+ if out:
+ out.append(NL)
+ out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
+ if isinstance(v,ParseResults):
+ if v:
+ out.append( v.dump(indent,depth+1) )
+ else:
+ out.append(_ustr(v))
+ else:
+ out.append(repr(v))
+ elif any(isinstance(vv,ParseResults) for vv in self):
+ v = self
+ for i,vv in enumerate(v):
+ if isinstance(vv,ParseResults):
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
+ else:
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
+
+ return "".join(out)
+
+ def pprint(self, *args, **kwargs):
+ """
+ Pretty-printer for parsed results as a list, using the C{pprint} module.
+ Accepts additional positional or keyword args as defined for the
+ C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+ Example::
+ ident = Word(alphas, alphanums)
+ num = Word(nums)
+ func = Forward()
+ term = ident | num | Group('(' + func + ')')
+ func <<= ident + Group(Optional(delimitedList(term)))
+ result = func.parseString("fna a,b,(fnb c,d,200),100")
+ result.pprint(width=40)
+ prints::
+ ['fna',
+ ['a',
+ 'b',
+ ['(', 'fnb', ['c', 'd', '200'], ')'],
+ '100']]
+ """
+ pprint.pprint(self.asList(), *args, **kwargs)
+
+ # add support for pickle protocol
+ def __getstate__(self):
+ return ( self.__toklist,
+ ( self.__tokdict.copy(),
+ self.__parent is not None and self.__parent() or None,
+ self.__accumNames,
+ self.__name ) )
+
+ def __setstate__(self,state):
+ self.__toklist = state[0]
+ (self.__tokdict,
+ par,
+ inAccumNames,
+ self.__name) = state[1]
+ self.__accumNames = {}
+ self.__accumNames.update(inAccumNames)
+ if par is not None:
+ self.__parent = wkref(par)
+ else:
+ self.__parent = None
+
+ def __getnewargs__(self):
+ return self.__toklist, self.__name, self.__asList, self.__modal
+
+ def __dir__(self):
+ return (dir(type(self)) + list(self.keys()))
+
+collections.MutableMapping.register(ParseResults)
+
+def col (loc,strg):
+ """Returns current column within a string, counting newlines as line separators.
+ The first column is number 1.
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+ """
+ s = strg
+ return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+ """Returns current line number within a string, counting newlines as line separators.
+ The first line is number 1.
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+ """
+ return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+ """Returns the line of text containing loc within a string, counting newlines as line separators.
+ """
+ lastCR = strg.rfind("\n", 0, loc)
+ nextCR = strg.find("\n", loc)
+ if nextCR >= 0:
+ return strg[lastCR+1:nextCR]
+ else:
+ return strg[lastCR+1:]
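+
+# A quick sketch of the three helpers above, for s = "ab\ncd" and loc = 4:
+#     lineno(4, s) -> 2, col(4, s) -> 2, line(4, s) -> 'cd'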
+
+def _defaultStartDebugAction( instring, loc, expr ):
+ print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+ print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+ print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+ """'Do-nothing' debug action, to suppress debugging output during parsing."""
+ pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+ #~ if func in singleArgBuiltins:
+ #~ return lambda s,l,t: func(t)
+ #~ limit = 0
+ #~ foundArity = False
+ #~ def wrapper(*args):
+ #~ nonlocal limit,foundArity
+ #~ while 1:
+ #~ try:
+ #~ ret = func(*args[limit:])
+ #~ foundArity = True
+ #~ return ret
+ #~ except TypeError:
+ #~ if limit == maxargs or foundArity:
+ #~ raise
+ #~ limit += 1
+ #~ continue
+ #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+# decorator to trim function calls to match the arity of the target
+def _trim_arity(func, maxargs=2):
+ if func in singleArgBuiltins:
+ return lambda s,l,t: func(t)
+ limit = [0]
+ foundArity = [False]
+
+ # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+ if system_version[:2] >= (3,5):
+ def extract_stack(limit=0):
+ # special handling for Python 3.5.0 - extra deep call stack by 1
+ offset = -3 if system_version == (3,5,0) else -2
+ frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+ return [(frame_summary.filename, frame_summary.lineno)]
+ def extract_tb(tb, limit=0):
+ frames = traceback.extract_tb(tb, limit=limit)
+ frame_summary = frames[-1]
+ return [(frame_summary.filename, frame_summary.lineno)]
+ else:
+ extract_stack = traceback.extract_stack
+ extract_tb = traceback.extract_tb
+
+ # synthesize what would be returned by traceback.extract_stack at the call to
+ # user's parse action 'func', so that we don't incur call penalty at parse time
+
+ LINE_DIFF = 6
+ # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
+ # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+ this_line = extract_stack(limit=2)[-1]
+ pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+ def wrapper(*args):
+ while 1:
+ try:
+ ret = func(*args[limit[0]:])
+ foundArity[0] = True
+ return ret
+ except TypeError:
+ # re-raise TypeErrors if they did not come from our arity testing
+ if foundArity[0]:
+ raise
+ else:
+ try:
+ tb = sys.exc_info()[-1]
+ if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+ raise
+ finally:
+ del tb
+
+ if limit[0] <= maxargs:
+ limit[0] += 1
+ continue
+ raise
+
+ # copy func name to wrapper for sensible debug output
+ func_name = "<parse action>"
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ wrapper.__name__ = func_name
+
+ return wrapper
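+
+# Sketch: thanks to _trim_arity, user parse actions may take 0-3 arguments and
+# are all invoked internally as fn(s, loc, toks). Assuming integer = Word(nums):
+#
+#     integer.setParseAction(lambda s, l, t: int(t[0]))  # full (s, loc, toks) form
+#     integer.setParseAction(lambda t: int(t[0]))        # toks-only form
+#     integer.setParseAction(lambda: None)               # no-argument form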
+
+class ParserElement(object):
+ """Abstract base level parser element class."""
+ DEFAULT_WHITE_CHARS = " \n\t\r"
+ verbose_stacktrace = False
+
+ @staticmethod
+ def setDefaultWhitespaceChars( chars ):
+ r"""
+ Overrides the default whitespace chars
+
+ Example::
+ # default whitespace chars are space, <TAB> and newline
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl']
+
+ # change to just treat newline as significant
+ ParserElement.setDefaultWhitespaceChars(" \t")
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def']
+ """
+ ParserElement.DEFAULT_WHITE_CHARS = chars
+
+ @staticmethod
+ def inlineLiteralsUsing(cls):
+ """
+ Set class to be used for inclusion of string literals into a parser.
+
+ Example::
+ # default literal class used is Literal
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+
+ # change to Suppress
+ ParserElement.inlineLiteralsUsing(Suppress)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '12', '31']
+ """
+ ParserElement._literalStringClass = cls
+
+ def __init__( self, savelist=False ):
+ self.parseAction = list()
+ self.failAction = None
+ #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall
+ self.strRepr = None
+ self.resultsName = None
+ self.saveAsList = savelist
+ self.skipWhitespace = True
+ self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ self.copyDefaultWhiteChars = True
+ self.mayReturnEmpty = False # used when checking for left-recursion
+ self.keepTabs = False
+ self.ignoreExprs = list()
+ self.debug = False
+ self.streamlined = False
+ self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+ self.errmsg = ""
+ self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+ self.debugActions = ( None, None, None ) #custom debug actions
+ self.re = None
+ self.callPreparse = True # used to avoid redundant calls to preParse
+ self.callDuringTry = False
+
+ def copy( self ):
+ """
+ Make a copy of this C{ParserElement}. Useful for defining different parse actions
+ for the same parsing pattern, using copies of the original parse element.
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+ integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+
+ print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+ prints::
+ [5120, 100, 655360, 268435456]
+ Equivalent form of C{expr.copy()} is just C{expr()}::
+ integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+ """
+ cpy = copy.copy( self )
+ cpy.parseAction = self.parseAction[:]
+ cpy.ignoreExprs = self.ignoreExprs[:]
+ if self.copyDefaultWhiteChars:
+ cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ return cpy
+
+ def setName( self, name ):
+ """
+ Define name for this expression, makes debugging and exception messages clearer.
+
+ Example::
+ Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+ Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1)
+ """
+ self.name = name
+ self.errmsg = "Expected " + self.name
+ if hasattr(self,"exception"):
+ self.exception.msg = self.errmsg
+ return self
+
+ def setResultsName( self, name, listAllMatches=False ):
+ """
+ Define name for referencing matching tokens as a nested attribute
+ of the returned parse results.
+ NOTE: this returns a *copy* of the original C{ParserElement} object;
+ this is so that the client can define a basic element, such as an
+ integer, and reference it in multiple places with different names.
+
+ You can also set results names using the abbreviated syntax,
+ C{expr("name")} in place of C{expr.setResultsName("name")} -
+ see L{I{__call__}<__call__>}.
+
+ Example::
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+
+ # equivalent form:
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+ """
+ newself = self.copy()
+ if name.endswith("*"):
+ name = name[:-1]
+ listAllMatches=True
+ newself.resultsName = name
+ newself.modalResults = not listAllMatches
+ return newself
+
+ def setBreak(self,breakFlag = True):
+ """Method to invoke the Python pdb debugger when this element is
+ about to be parsed. Set C{breakFlag} to True to enable, False to
+ disable.
+ """
+ if breakFlag:
+ _parseMethod = self._parse
+ def breaker(instring, loc, doActions=True, callPreParse=True):
+ import pdb
+ pdb.set_trace()
+ return _parseMethod( instring, loc, doActions, callPreParse )
+ breaker._originalParseMethod = _parseMethod
+ self._parse = breaker
+ else:
+ if hasattr(self._parse,"_originalParseMethod"):
+ self._parse = self._parse._originalParseMethod
+ return self
+
+ def setParseAction( self, *fns, **kwargs ):
+ """
+ Define action to perform when successfully matching parse element definition.
+ Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+ C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+ - s = the original string being parsed (see note below)
+ - loc = the location of the matching substring
+ - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+ If the functions in fns modify the tokens, they can return them as the return
+ value from fn, and the modified list of tokens will replace the original.
+ Otherwise, fn does not need to return any value.
+
+ Optional keyword arguments:
+ - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{parseString}<parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer + '/' + integer + '/' + integer
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+ # use parse action to convert to ints at parse time
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ date_str = integer + '/' + integer + '/' + integer
+
+ # note that integer fields are now ints, not strings
+ date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]
+ """
+ self.parseAction = list(map(_trim_arity, list(fns)))
+ self.callDuringTry = kwargs.get("callDuringTry", False)
+ return self
+
+ def addParseAction( self, *fns, **kwargs ):
+ """
+ Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+
+ See examples in L{I{copy}<copy>}.
+ """
+ self.parseAction += list(map(_trim_arity, list(fns)))
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def addCondition(self, *fns, **kwargs):
+ """Add a boolean predicate function to expression's list of parse actions. See
+ L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
+ functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+ Optional keyword arguments:
+ - message = define a custom message to be used in the raised exception
+ - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ year_int = integer.copy()
+ year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+ date_str = year_int + '/' + integer + '/' + integer
+
+ result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+ """
+ msg = kwargs.get("message", "failed user-defined condition")
+ exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+        for fn in fns:
+            # Bind fn (already arity-trimmed) as a default argument so that each
+            # pa captures its own fn; a bare closure would late-bind to the last
+            # fn in the loop when multiple conditions are passed.
+            def pa(s,l,t, fn=_trim_arity(fn)):
+                if not bool(fn(s,l,t)):
+                    raise exc_type(s,l,msg)
+            self.parseAction.append(pa)
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def setFailAction( self, fn ):
+ """Define action to perform if parsing fails at this expression.
+        Fail action fn is a callable function that takes the arguments
+ C{fn(s,loc,expr,err)} where:
+ - s = string being parsed
+ - loc = location where expression match was attempted and failed
+ - expr = the parse expression that failed
+ - err = the exception thrown
+ The function returns no value. It may throw C{L{ParseFatalException}}
+ if it is desired to stop parsing immediately."""
+ self.failAction = fn
+ return self
+
+ def _skipIgnorables( self, instring, loc ):
+ exprsFound = True
+ while exprsFound:
+ exprsFound = False
+ for e in self.ignoreExprs:
+ try:
+ while 1:
+ loc,dummy = e._parse( instring, loc )
+ exprsFound = True
+ except ParseException:
+ pass
+ return loc
+
+ def preParse( self, instring, loc ):
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+
+ if self.skipWhitespace:
+ wt = self.whiteChars
+ instrlen = len(instring)
+ while loc < instrlen and instring[loc] in wt:
+ loc += 1
+
+ return loc
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ return loc, []
+
+ def postParse( self, instring, loc, tokenlist ):
+ return tokenlist
+
+ #~ @profile
+ def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+ debugging = ( self.debug ) #and doActions )
+
+ if debugging or self.failAction:
+ #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+ if (self.debugActions[0] ):
+ self.debugActions[0]( instring, loc, self )
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ try:
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ except ParseBaseException as err:
+ #~ print ("Exception raised:", err)
+ if self.debugActions[2]:
+ self.debugActions[2]( instring, tokensStart, self, err )
+ if self.failAction:
+ self.failAction( instring, tokensStart, self, err )
+ raise
+ else:
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ if self.mayIndexError or loc >= len(instring):
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ else:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+ tokens = self.postParse( instring, loc, tokens )
+
+ retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+ if self.parseAction and (doActions or self.callDuringTry):
+ if debugging:
+ try:
+ for fn in self.parseAction:
+ tokens = fn( instring, tokensStart, retTokens )
+ if tokens is not None:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+ except ParseBaseException as err:
+ #~ print "Exception raised in user parse action:", err
+ if (self.debugActions[2] ):
+ self.debugActions[2]( instring, tokensStart, self, err )
+ raise
+ else:
+ for fn in self.parseAction:
+ tokens = fn( instring, tokensStart, retTokens )
+ if tokens is not None:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+
+ if debugging:
+ #~ print ("Matched",self,"->",retTokens.asList())
+ if (self.debugActions[1] ):
+ self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+ return loc, retTokens
+
+ def tryParse( self, instring, loc ):
+ try:
+ return self._parse( instring, loc, doActions=False )[0]
+ except ParseFatalException:
+ raise ParseException( instring, loc, self.errmsg, self)
+
+ def canParseNext(self, instring, loc):
+ try:
+ self.tryParse(instring, loc)
+ except (ParseException, IndexError):
+ return False
+ else:
+ return True
+
+ class _UnboundedCache(object):
+ def __init__(self):
+ cache = {}
+ self.not_in_cache = not_in_cache = object()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+
+ def clear(self):
+ cache.clear()
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+
+ if _OrderedDict is not None:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = _OrderedDict()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ if len(cache) > size:
+ cache.popitem(False)
+
+ def clear(self):
+ cache.clear()
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+
+ else:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = {}
+ key_fifo = collections.deque([], size)
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ if len(cache) > size:
+ cache.pop(key_fifo.popleft(), None)
+ key_fifo.append(key)
+
+ def clear(self):
+ cache.clear()
+ key_fifo.clear()
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+
+ # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+ packrat_cache_lock = RLock()
+ packrat_cache_stats = [0, 0]
+
+ # this method gets repeatedly called during backtracking with the same arguments -
+ # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+ def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+ HIT, MISS = 0, 1
+ lookup = (self, instring, loc, callPreParse, doActions)
+ with ParserElement.packrat_cache_lock:
+ cache = ParserElement.packrat_cache
+ value = cache.get(lookup)
+ if value is cache.not_in_cache:
+ ParserElement.packrat_cache_stats[MISS] += 1
+ try:
+ value = self._parseNoCache(instring, loc, doActions, callPreParse)
+ except ParseBaseException as pe:
+ # cache a copy of the exception, without the traceback
+ cache.set(lookup, pe.__class__(*pe.args))
+ raise
+ else:
+ cache.set(lookup, (value[0], value[1].copy()))
+ return value
+ else:
+ ParserElement.packrat_cache_stats[HIT] += 1
+ if isinstance(value, Exception):
+ raise value
+ return (value[0], value[1].copy())
+
+ _parse = _parseNoCache
+
+ @staticmethod
+ def resetCache():
+ ParserElement.packrat_cache.clear()
+ ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+ _packratEnabled = False
+ @staticmethod
+ def enablePackrat(cache_size_limit=128):
+ """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+ Repeated parse attempts at the same string location (which happens
+ often in many complex grammars) can immediately return a cached value,
+       instead of re-executing parsing/validating code.  Memoizing is done for
+       both valid results and parsing exceptions.
+
+ Parameters:
+ - cache_size_limit - (default=C{128}) - if an integer value is provided
+ will limit the size of the packrat cache; if None is passed, then
+ the cache size will be unbounded; if 0 is passed, the cache will
+ be effectively disabled.
+
+ This speedup may break existing programs that use parse actions that
+ have side-effects. For this reason, packrat parsing is disabled when
+ you first import pyparsing. To activate the packrat feature, your
+ program must call the class method C{ParserElement.enablePackrat()}. If
+ your program uses C{psyco} to "compile as you go", you must call
+ C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
+ Python will crash. For best results, call C{enablePackrat()} immediately
+ after importing pyparsing.
+
+ Example::
+ import pyparsing
+ pyparsing.ParserElement.enablePackrat()
+ """
+ if not ParserElement._packratEnabled:
+ ParserElement._packratEnabled = True
+ if cache_size_limit is None:
+ ParserElement.packrat_cache = ParserElement._UnboundedCache()
+ else:
+ ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+ ParserElement._parse = ParserElement._parseCache
+
+ def parseString( self, instring, parseAll=False ):
+ """
+ Execute the parse expression with the given string.
+ This is the main interface to the client code, once the complete
+ expression has been built.
+
+ If you want the grammar to require that the entire input string be
+ successfully parsed, then set C{parseAll} to True (equivalent to ending
+ the grammar with C{L{StringEnd()}}).
+
+ Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+ in order to report proper column numbers in parse actions.
+ If the input string contains tabs and
+ the grammar uses parse actions that use the C{loc} argument to index into the
+ string being parsed, you can ensure you have a consistent view of the input
+ string by:
+ - calling C{parseWithTabs} on your grammar before calling C{parseString}
+ (see L{I{parseWithTabs}<parseWithTabs>})
+ - define your parse action using the full C{(s,loc,toks)} signature, and
+ reference the input string using the parse action's C{s} argument
+         - explicitly expand the tabs in your input string before calling
+ C{parseString}
+
+ Example::
+ Word('a').parseString('aaaaabaaa') # -> ['aaaaa']
+ Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text
+ """
+ ParserElement.resetCache()
+ if not self.streamlined:
+ self.streamline()
+ #~ self.saveAsList = True
+ for e in self.ignoreExprs:
+ e.streamline()
+ if not self.keepTabs:
+ instring = instring.expandtabs()
+ try:
+ loc, tokens = self._parse( instring, 0 )
+ if parseAll:
+ loc = self.preParse( instring, loc )
+ se = Empty() + StringEnd()
+ se._parse( instring, loc )
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+ else:
+ return tokens
+
+ def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+ """
+ Scan the input string for expression matches. Each match will return the
+ matching tokens, start location, and end location. May be called with optional
+ C{maxMatches} argument, to clip scanning after 'n' matches are found. If
+ C{overlap} is specified, then overlapping matches will be reported.
+
+ Note that the start and end locations are reported relative to the string
+ being parsed. See L{I{parseString}<parseString>} for more information on parsing
+ strings with embedded tabs.
+
+ Example::
+ source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+ print(source)
+ for tokens,start,end in Word(alphas).scanString(source):
+ print(' '*start + '^'*(end-start))
+ print(' '*start + tokens[0])
+
+ prints::
+
+ sldjf123lsdjjkf345sldkjf879lkjsfd987
+ ^^^^^
+ sldjf
+ ^^^^^^^
+ lsdjjkf
+ ^^^^^^
+ sldkjf
+ ^^^^^^
+ lkjsfd
+ """
+ if not self.streamlined:
+ self.streamline()
+ for e in self.ignoreExprs:
+ e.streamline()
+
+ if not self.keepTabs:
+ instring = _ustr(instring).expandtabs()
+ instrlen = len(instring)
+ loc = 0
+ preparseFn = self.preParse
+ parseFn = self._parse
+ ParserElement.resetCache()
+ matches = 0
+ try:
+ while loc <= instrlen and matches < maxMatches:
+ try:
+ preloc = preparseFn( instring, loc )
+ nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
+ except ParseException:
+ loc = preloc+1
+ else:
+ if nextLoc > loc:
+ matches += 1
+ yield tokens, preloc, nextLoc
+ if overlap:
+ nextloc = preparseFn( instring, loc )
+ if nextloc > loc:
+ loc = nextLoc
+ else:
+ loc += 1
+ else:
+ loc = nextLoc
+ else:
+ loc = preloc+1
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def transformString( self, instring ):
+ """
+ Extension to C{L{scanString}}, to modify matching text with modified tokens that may
+ be returned from a parse action. To use C{transformString}, define a grammar and
+ attach a parse action to it that modifies the returned token list.
+ Invoking C{transformString()} on a target string will then scan for matches,
+ and replace the matched text patterns according to the logic in the parse
+ action. C{transformString()} returns the resulting transformed string.
+
+ Example::
+ wd = Word(alphas)
+ wd.setParseAction(lambda toks: toks[0].title())
+
+ print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+ Prints::
+ Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+ """
+ out = []
+ lastE = 0
+ # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
+ # keep string locs straight between transformString and scanString
+ self.keepTabs = True
+ try:
+ for t,s,e in self.scanString( instring ):
+ out.append( instring[lastE:s] )
+ if t:
+ if isinstance(t,ParseResults):
+ out += t.asList()
+ elif isinstance(t,list):
+ out += t
+ else:
+ out.append(t)
+ lastE = e
+ out.append(instring[lastE:])
+ out = [o for o in out if o]
+ return "".join(map(_ustr,_flatten(out)))
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def searchString( self, instring, maxMatches=_MAX_INT ):
+ """
+ Another extension to C{L{scanString}}, simplifying the access to the tokens found
+ to match the given parse expression. May be called with optional
+ C{maxMatches} argument, to clip searching after 'n' matches are found.
+
+ Example::
+ # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+ cap_word = Word(alphas.upper(), alphas.lower())
+
+ print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+ prints::
+ ['More', 'Iron', 'Lead', 'Gold', 'I']
+ """
+ try:
+ return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+ """
+ Generator method to split a string using the given expression as a separator.
+        May be called with optional C{maxsplit} argument, to limit the number of splits;
+        and the optional C{includeSeparators} argument (default=C{False}), indicating
+        whether the separating matched text should be included in the split results.
+
+ Example::
+ punc = oneOf(list(".,;:/-!?"))
+ print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+ prints::
+ ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+ """
+ splits = 0
+ last = 0
+ for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+ yield instring[last:s]
+ if includeSeparators:
+ yield t[0]
+ last = e
+ yield instring[last:]
+
+ def __add__(self, other ):
+ """
+ Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+ converts them to L{Literal}s by default.
+
+ Example::
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+ Prints::
+ Hello, World! -> ['Hello', ',', 'World', '!']
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return And( [ self, other ] )
+
+ def __radd__(self, other ):
+ """
+ Implementation of + operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other + self
+
+ def __sub__(self, other):
+ """
+ Implementation of - operator, returns C{L{And}} with error stop
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return And( [ self, And._ErrorStop(), other ] )
+
+ def __rsub__(self, other ):
+ """
+ Implementation of - operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other - self
+
+ def __mul__(self,other):
+ """
+ Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
+ tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
+ may also include C{None} as in:
+ - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+ to C{expr*n + L{ZeroOrMore}(expr)}
+ (read as "at least n instances of C{expr}")
+ - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+ (read as "0 to n instances of C{expr}")
+ - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+ - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+ Note that C{expr*(None,n)} does not raise an exception if
+ more than n exprs exist in the input stream; that is,
+ C{expr*(None,n)} does not enforce a maximum number of expr
+ occurrences. If this behavior is desired, then write
+ C{expr*(None,n) + ~expr}
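+
+        Example (a brief illustrative sketch)::
+            Word(nums) * 3      # same as Word(nums) + Word(nums) + Word(nums)
+            Word(nums) * (2,4)  # match 2 to 4 occurrences of Word(nums)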
+ """
+ if isinstance(other,int):
+ minElements, optElements = other,0
+ elif isinstance(other,tuple):
+ other = (other + (None, None))[:2]
+ if other[0] is None:
+ other = (0, other[1])
+ if isinstance(other[0],int) and other[1] is None:
+ if other[0] == 0:
+ return ZeroOrMore(self)
+ if other[0] == 1:
+ return OneOrMore(self)
+ else:
+ return self*other[0] + ZeroOrMore(self)
+ elif isinstance(other[0],int) and isinstance(other[1],int):
+ minElements, optElements = other
+ optElements -= minElements
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+ if minElements < 0:
+ raise ValueError("cannot multiply ParserElement by negative value")
+ if optElements < 0:
+ raise ValueError("second tuple value must be greater or equal to first tuple value")
+ if minElements == optElements == 0:
+ raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+ if (optElements):
+ def makeOptionalList(n):
+ if n>1:
+ return Optional(self + makeOptionalList(n-1))
+ else:
+ return Optional(self)
+ if minElements:
+ if minElements == 1:
+ ret = self + makeOptionalList(optElements)
+ else:
+ ret = And([self]*minElements) + makeOptionalList(optElements)
+ else:
+ ret = makeOptionalList(optElements)
+ else:
+ if minElements == 1:
+ ret = self
+ else:
+ ret = And([self]*minElements)
+ return ret
+
+ def __rmul__(self, other):
+ return self.__mul__(other)
+
+ def __or__(self, other ):
+ """
+ Implementation of | operator - returns C{L{MatchFirst}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return MatchFirst( [ self, other ] )
+
+ def __ror__(self, other ):
+ """
+ Implementation of | operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other | self
+
+ def __xor__(self, other ):
+ """
+ Implementation of ^ operator - returns C{L{Or}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Or( [ self, other ] )
+
+ def __rxor__(self, other ):
+ """
+ Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other ^ self
+
+ def __and__(self, other ):
+ """
+ Implementation of & operator - returns C{L{Each}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Each( [ self, other ] )
+
+ def __rand__(self, other ):
+ """
+ Implementation of & operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other & self
+
+ def __invert__( self ):
+ """
+ Implementation of ~ operator - returns C{L{NotAny}}
+ """
+ return NotAny( self )
+
+ def __call__(self, name=None):
+ """
+ Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
+
+ If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
+ passed as C{True}.
+
+ If C{name} is omitted, same as calling C{L{copy}}.
+
+ Example::
+ # these are equivalent
+ userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+ userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
+ """
+ if name is not None:
+ return self.setResultsName(name)
+ else:
+ return self.copy()
+
+ def suppress( self ):
+ """
+ Suppresses the output of this C{ParserElement}; useful to keep punctuation from
+ cluttering up returned output.
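+
+        Example (a brief sketch)::
+            wd = Word(alphas)
+            (wd + ',' + wd).parseString("Hello, World")           # -> ['Hello', ',', 'World']
+            (wd + Suppress(',') + wd).parseString("Hello, World") # -> ['Hello', 'World']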
+ """
+ return Suppress( self )
+
+ def leaveWhitespace( self ):
+ """
+ Disables the skipping of whitespace before matching the characters in the
+ C{ParserElement}'s defined pattern. This is normally only used internally by
+ the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+ """
+ self.skipWhitespace = False
+ return self
+
+ def setWhitespaceChars( self, chars ):
+ """
+ Overrides the default whitespace chars
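+
+        Example (a sketch; skip only spaces and tabs, making newlines significant)::
+            Word(alphas).setWhitespaceChars(" \t")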
+ """
+ self.skipWhitespace = True
+ self.whiteChars = chars
+ self.copyDefaultWhiteChars = False
+ return self
+
+ def parseWithTabs( self ):
+ """
+ Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
+ Must be called before C{parseString} when the input grammar contains elements that
+ match C{<TAB>} characters.
+ """
+ self.keepTabs = True
+ return self
+
+ def ignore( self, other ):
+ """
+ Define expression to be ignored (e.g., comments) while doing pattern
+ matching; may be called repeatedly, to define multiple comment or other
+ ignorable patterns.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+ patt.ignore(cStyleComment)
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+ """
+ if isinstance(other, basestring):
+ other = Suppress(other)
+
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ self.ignoreExprs.append(other)
+ else:
+ self.ignoreExprs.append( Suppress( other.copy() ) )
+ return self
+
+ def setDebugActions( self, startAction, successAction, exceptionAction ):
+ """
+ Enable display of debugging messages while doing pattern matching.
+ """
+ self.debugActions = (startAction or _defaultStartDebugAction,
+ successAction or _defaultSuccessDebugAction,
+ exceptionAction or _defaultExceptionDebugAction)
+ self.debug = True
+ return self
+
+ def setDebug( self, flag=True ):
+ """
+ Enable display of debugging messages while doing pattern matching.
+ Set C{flag} to True to enable, False to disable.
+
+ Example::
+ wd = Word(alphas).setName("alphaword")
+ integer = Word(nums).setName("numword")
+ term = wd | integer
+
+ # turn on debugging for wd
+ wd.setDebug()
+
+ OneOrMore(term).parseString("abc 123 xyz 890")
+
+ prints::
+ Match alphaword at loc 0(1,1)
+ Matched alphaword -> ['abc']
+ Match alphaword at loc 3(1,4)
+ Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+ Match alphaword at loc 7(1,8)
+ Matched alphaword -> ['xyz']
+ Match alphaword at loc 11(1,12)
+ Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+ Match alphaword at loc 15(1,16)
+ Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+ The output shown is that produced by the default debug actions - custom debug actions can be
+ specified using L{setDebugActions}. Prior to attempting
+ to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
+ is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
+ message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
+ which makes debugging and exception messages easier to understand - for instance, the default
+ name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
+ """
+ if flag:
+ self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
+ else:
+ self.debug = False
+ return self
+
+ def __str__( self ):
+ return self.name
+
+ def __repr__( self ):
+ return _ustr(self)
+
+ def streamline( self ):
+ self.streamlined = True
+ self.strRepr = None
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ pass
+
+ def validate( self, validateTrace=[] ):
+ """
+ Check defined expressions for valid structure, check for infinite recursive definitions.
+ """
+ self.checkRecursion( [] )
+
+ def parseFile( self, file_or_filename, parseAll=False ):
+ """
+ Execute the parse expression on the given file or filename.
+ If a filename is specified (instead of a file object),
+ the entire file is opened, read, and closed before parsing.
+ """
+ try:
+ file_contents = file_or_filename.read()
+ except AttributeError:
+ with open(file_or_filename, "r") as f:
+ file_contents = f.read()
+ try:
+ return self.parseString(file_contents, parseAll)
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def __eq__(self,other):
+ if isinstance(other, ParserElement):
+ return self is other or vars(self) == vars(other)
+ elif isinstance(other, basestring):
+ return self.matches(other)
+ else:
+ return super(ParserElement,self)==other
+
+ def __ne__(self,other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash(id(self))
+
+ def __req__(self,other):
+ return self == other
+
+ def __rne__(self,other):
+ return not (self == other)
+
+ def matches(self, testString, parseAll=True):
+ """
+ Method for quick testing of a parser against a test string. Good for simple
+        inline microtests of sub-expressions while building up a larger parser.
+
+ Parameters:
+ - testString - to test against this expression for a match
+ - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+ Example::
+ expr = Word(nums)
+ assert expr.matches("100")
+ """
+ try:
+ self.parseString(_ustr(testString), parseAll=parseAll)
+ return True
+ except ParseBaseException:
+ return False
+
+ def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+ """
+ Execute the parse expression on a series of test strings, showing each
+ test, the parsed results or where the parse failed. Quick and easy way to
+ run a parse expression against a list of sample strings.
+
+ Parameters:
+ - tests - a list of separate test strings, or a multiline string of test strings
+ - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+ - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
+ string; pass None to disable comment filtering
+ - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+ if False, only dump nested list
+ - printResults - (default=C{True}) prints test output to stdout
+ - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+ Returns: a (success, results) tuple, where success indicates that all tests succeeded
+ (or failed if C{failureTests} is True), and the results contain a list of lines of each
+ test's output
+
+ Example::
+ number_expr = pyparsing_common.number.copy()
+
+ result = number_expr.runTests('''
+ # unsigned integer
+ 100
+ # negative integer
+ -100
+ # float with scientific notation
+ 6.02e23
+ # integer with scientific notation
+ 1e-12
+ ''')
+ print("Success" if result[0] else "Failed!")
+
+ result = number_expr.runTests('''
+ # stray character
+ 100Z
+ # missing leading digit before '.'
+ -.100
+ # too many '.'
+ 3.14.159
+ ''', failureTests=True)
+ print("Success" if result[0] else "Failed!")
+ prints::
+ # unsigned integer
+ 100
+ [100]
+
+ # negative integer
+ -100
+ [-100]
+
+ # float with scientific notation
+ 6.02e23
+ [6.02e+23]
+
+ # integer with scientific notation
+ 1e-12
+ [1e-12]
+
+ Success
+
+ # stray character
+ 100Z
+ ^
+ FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+ # missing leading digit before '.'
+ -.100
+ ^
+ FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+ # too many '.'
+ 3.14.159
+ ^
+ FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+ Success
+
+ Each test string must be on a single line. If you want to test a string that spans multiple
+ lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal; you must include the leading 'r'.)
+ """
+ if isinstance(tests, basestring):
+ tests = list(map(str.strip, tests.rstrip().splitlines()))
+ if isinstance(comment, basestring):
+ comment = Literal(comment)
+ allResults = []
+ comments = []
+ success = True
+ for t in tests:
+ if comment is not None and comment.matches(t, False) or comments and not t:
+ comments.append(t)
+ continue
+ if not t:
+ continue
+ out = ['\n'.join(comments), t]
+ comments = []
+ try:
+ t = t.replace(r'\n','\n')
+ result = self.parseString(t, parseAll=parseAll)
+ out.append(result.dump(full=fullDump))
+ success = success and not failureTests
+ except ParseBaseException as pe:
+ fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+ if '\n' in t:
+ out.append(line(pe.loc, t))
+ out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
+ else:
+ out.append(' '*pe.loc + '^' + fatal)
+ out.append("FAIL: " + str(pe))
+ success = success and failureTests
+ result = pe
+ except Exception as exc:
+ out.append("FAIL-EXCEPTION: " + str(exc))
+ success = success and failureTests
+ result = exc
+
+ if printResults:
+ if fullDump:
+ out.append('')
+ print('\n'.join(out))
+
+ allResults.append((t, result))
+
+ return success, allResults
+
+
+class Token(ParserElement):
+ """
+ Abstract C{ParserElement} subclass, for defining atomic matching patterns.
+ """
+ def __init__( self ):
+ super(Token,self).__init__( savelist=False )
+
+
+class Empty(Token):
+ """
+ An empty token, will always match.
+ """
+ def __init__( self ):
+ super(Empty,self).__init__()
+ self.name = "Empty"
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+
+
+class NoMatch(Token):
+ """
+ A token that will never match.
+ """
+ def __init__( self ):
+ super(NoMatch,self).__init__()
+ self.name = "NoMatch"
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.errmsg = "Unmatchable token"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+ """
+ Token to exactly match a specified string.
+
+ Example::
+ Literal('blah').parseString('blah') # -> ['blah']
+ Literal('blah').parseString('blahfooblah') # -> ['blah']
+ Literal('blah').parseString('bla') # -> Exception: Expected "blah"
+
+ For case-insensitive matching, use L{CaselessLiteral}.
+
+ For keyword matching (force word break before and after the matched string),
+ use L{Keyword} or L{CaselessKeyword}.
+ """
+ def __init__( self, matchString ):
+ super(Literal,self).__init__()
+ self.match = matchString
+ self.matchLen = len(matchString)
+ try:
+ self.firstMatchChar = matchString[0]
+ except IndexError:
+ warnings.warn("null string passed to Literal; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+ self.__class__ = Empty
+ self.name = '"%s"' % _ustr(self.match)
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = False
+ self.mayIndexError = False
+
+ # Performance tuning: this routine gets called a *lot*
+ # if this is a single character match string and the first character matches,
+ # short-circuit as quickly as possible, and avoid calling startswith
+ #~ @profile
+ def parseImpl( self, instring, loc, doActions=True ):
+ if (instring[loc] == self.firstMatchChar and
+ (self.matchLen==1 or instring.startswith(self.match,loc)) ):
+ return loc+self.matchLen, self.match
+ raise ParseException(instring, loc, self.errmsg, self)
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+ """
+ Token to exactly match a specified string as a keyword, that is, it must be
+ immediately followed by a non-keyword character. Compare with C{L{Literal}}:
+ - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
+ - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
+ Accepts two optional constructor arguments in addition to the keyword string:
+ - C{identChars} is a string of characters that would be valid identifier characters,
+ defaulting to all alphanumerics + "_" and "$"
+ - C{caseless} allows case-insensitive matching, default is C{False}.
+
+ Example::
+ Keyword("start").parseString("start") # -> ['start']
+ Keyword("start").parseString("starting") # -> Exception
+
+ For case-insensitive matching, use L{CaselessKeyword}.
+ """
+ DEFAULT_KEYWORD_CHARS = alphanums+"_$"
+
+ def __init__( self, matchString, identChars=None, caseless=False ):
+ super(Keyword,self).__init__()
+ if identChars is None:
+ identChars = Keyword.DEFAULT_KEYWORD_CHARS
+ self.match = matchString
+ self.matchLen = len(matchString)
+ try:
+ self.firstMatchChar = matchString[0]
+ except IndexError:
+ warnings.warn("null string passed to Keyword; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+ self.name = '"%s"' % self.match
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = False
+ self.mayIndexError = False
+ self.caseless = caseless
+ if caseless:
+ self.caselessmatch = matchString.upper()
+ identChars = identChars.upper()
+ self.identChars = set(identChars)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.caseless:
+ if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+ (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
+ (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
+ return loc+self.matchLen, self.match
+ else:
+ if (instring[loc] == self.firstMatchChar and
+ (self.matchLen==1 or instring.startswith(self.match,loc)) and
+ (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
+ (loc == 0 or instring[loc-1] not in self.identChars) ):
+ return loc+self.matchLen, self.match
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ def copy(self):
+ c = super(Keyword,self).copy()
+ c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+ return c
+
+ @staticmethod
+ def setDefaultKeywordChars( chars ):
+ """Overrides the default Keyword chars
+ """
+ Keyword.DEFAULT_KEYWORD_CHARS = chars
+
+class CaselessLiteral(Literal):
+ """
+ Token to match a specified string, ignoring case of letters.
+ Note: the matched results will always be in the case of the given
+ match string, NOT the case of the input text.
+
+ Example::
+ OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+
+ (Contrast with example for L{CaselessKeyword}.)
+ """
+ def __init__( self, matchString ):
+ super(CaselessLiteral,self).__init__( matchString.upper() )
+ # Preserve the defining literal.
+ self.returnString = matchString
+ self.name = "'%s'" % self.returnString
+ self.errmsg = "Expected " + self.name
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[ loc:loc+self.matchLen ].upper() == self.match:
+ return loc+self.matchLen, self.returnString
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+ """
+ Caseless version of L{Keyword}.
+
+ Example::
+ OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+
+ (Contrast with example for L{CaselessLiteral}.)
+ """
+ def __init__( self, matchString, identChars=None ):
+ super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+ (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
+ return loc+self.matchLen, self.match
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class CloseMatch(Token):
+ """
+ A variation on L{Literal} which matches "close" matches, that is,
+ strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
+ - C{match_string} - string to be matched
+ - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
+
+ The results from a successful parse will contain the matched text from the input string and the following named results:
+ - C{mismatches} - a list of the positions within the match_string where mismatches were found
+ - C{original} - the original match_string used to compare against the input string
+
+ If C{mismatches} is an empty list, then the match was an exact match.
+
+ Example::
+ patt = CloseMatch("ATCATCGAATGGA")
+ patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+ patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+ # exact match
+ patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+ # close match allowing up to 2 mismatches
+ patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+ patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+ """
+ def __init__(self, match_string, maxMismatches=1):
+ super(CloseMatch,self).__init__()
+ self.name = match_string
+ self.match_string = match_string
+ self.maxMismatches = maxMismatches
+ self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+ self.mayIndexError = False
+ self.mayReturnEmpty = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ start = loc
+ instrlen = len(instring)
+ maxloc = start + len(self.match_string)
+
+ if maxloc <= instrlen:
+ match_string = self.match_string
+ match_stringloc = 0
+ mismatches = []
+ maxMismatches = self.maxMismatches
+
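+            # walk the input region and match_string in parallel, recording each
+            # mismatch position; the for-else below runs only when the loop was
+            # not broken, i.e. the mismatch budget was never exceeded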
+ for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
+ src,mat = s_m
+ if src != mat:
+ mismatches.append(match_stringloc)
+ if len(mismatches) > maxMismatches:
+ break
+ else:
+ loc = match_stringloc + 1
+ results = ParseResults([instring[start:loc]])
+ results['original'] = self.match_string
+ results['mismatches'] = mismatches
+ return loc, results
+
+ raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+ """
+ Token for matching words composed of allowed character sets.
+    Defined with a string containing all allowed initial characters,
+ an optional string containing allowed body characters (if omitted,
+ defaults to the initial character set), and an optional minimum,
+ maximum, and/or exact length. The default value for C{min} is 1 (a
+ minimum value < 1 is not valid); the default values for C{max} and C{exact}
+ are 0, meaning no maximum or exact length restriction. An optional
+ C{excludeChars} parameter can list characters that might be found in
+ the input C{bodyChars} string; useful to define a word of all printables
+ except for one or two characters, for instance.
+
+ L{srange} is useful for defining custom character set strings for defining
+ C{Word} expressions, using range notation from regular expression character sets.
+
+ A common mistake is to use C{Word} to match a specific literal string, as in
+ C{Word("Address")}. Remember that C{Word} uses the string argument to define
+ I{sets} of matchable characters. This expression would match "Add", "AAA",
+ "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
+ To match an exact literal string, use L{Literal} or L{Keyword}.
+
+ pyparsing includes helper strings for building Words:
+ - L{alphas}
+ - L{nums}
+ - L{alphanums}
+ - L{hexnums}
+ - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
+ - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+ - L{printables} (any non-whitespace character)
+
+ Example::
+ # a word composed of digits
+ integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+
+ # a word with a leading capital, and zero or more lowercase
+ capital_word = Word(alphas.upper(), alphas.lower())
+
+ # hostnames are alphanumeric, with leading alpha, and '-'
+ hostname = Word(alphas, alphanums+'-')
+
+ # roman numeral (not a strict parser, accepts invalid mix of characters)
+ roman = Word("IVXLCDM")
+
+ # any string of non-whitespace characters, except for ','
+ csv_value = Word(printables, excludeChars=",")
+ """
+ def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+ super(Word,self).__init__()
+ if excludeChars:
+ initChars = ''.join(c for c in initChars if c not in excludeChars)
+ if bodyChars:
+ bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+ self.initCharsOrig = initChars
+ self.initChars = set(initChars)
+ if bodyChars :
+ self.bodyCharsOrig = bodyChars
+ self.bodyChars = set(bodyChars)
+ else:
+ self.bodyCharsOrig = initChars
+ self.bodyChars = set(initChars)
+
+ self.maxSpecified = max > 0
+
+ if min < 1:
+ raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.asKeyword = asKeyword
+
+ if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+ if self.bodyCharsOrig == self.initCharsOrig:
+ self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+ elif len(self.initCharsOrig) == 1:
+ self.reString = "%s[%s]*" % \
+ (re.escape(self.initCharsOrig),
+ _escapeRegexRangeChars(self.bodyCharsOrig),)
+ else:
+ self.reString = "[%s][%s]*" % \
+ (_escapeRegexRangeChars(self.initCharsOrig),
+ _escapeRegexRangeChars(self.bodyCharsOrig),)
+ if self.asKeyword:
+ self.reString = r"\b"+self.reString+r"\b"
+ try:
+ self.re = re.compile( self.reString )
+ except Exception:
+ self.re = None
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.re:
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ return loc, result.group()
+
+ if not(instring[ loc ] in self.initChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ start = loc
+ loc += 1
+ instrlen = len(instring)
+ bodychars = self.bodyChars
+ maxloc = start + self.maxLen
+ maxloc = min( maxloc, instrlen )
+ while loc < maxloc and instring[loc] in bodychars:
+ loc += 1
+
+ throwException = False
+ if loc - start < self.minLen:
+ throwException = True
+ if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+ throwException = True
+ if self.asKeyword:
+ if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+ throwException = True
+
+ if throwException:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+ def __str__( self ):
+ try:
+ return super(Word,self).__str__()
+ except Exception:
+ pass
+
+
+ if self.strRepr is None:
+
+ def charsAsStr(s):
+ if len(s)>4:
+ return s[:4]+"..."
+ else:
+ return s
+
+ if ( self.initCharsOrig != self.bodyCharsOrig ):
+ self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+ else:
+ self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+ return self.strRepr
+
+
+class Regex(Token):
+ """
+ Token for matching strings that match a given regular expression.
+    Defined with a string specifying the regular expression, in a form recognized by the built-in Python re module.
+ If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
+ named parse results.
+
+ Example::
+ realnum = Regex(r"[+-]?\d+\.\d*")
+ date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+ # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+ roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+ """
+ compiledREtype = type(re.compile("[A-Z]"))
+ def __init__( self, pattern, flags=0):
+ """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
+ super(Regex,self).__init__()
+
+ if isinstance(pattern, basestring):
+ if not pattern:
+ warnings.warn("null string passed to Regex; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+
+ self.pattern = pattern
+ self.flags = flags
+
+ try:
+ self.re = re.compile(self.pattern, self.flags)
+ self.reString = self.pattern
+ except sre_constants.error:
+ warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+ SyntaxWarning, stacklevel=2)
+ raise
+
+ elif isinstance(pattern, Regex.compiledREtype):
+ self.re = pattern
+ self.pattern = \
+ self.reString = str(pattern)
+ self.flags = flags
+
+ else:
+ raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ d = result.groupdict()
+ ret = ParseResults(result.group())
+ if d:
+ for k in d:
+ ret[k] = d[k]
+ return loc,ret
+
+ def __str__( self ):
+ try:
+ return super(Regex,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+ return self.strRepr
+
+
+class QuotedString(Token):
+ r"""
+ Token for matching strings that are delimited by quoting characters.
+
+ Defined with the following parameters:
+ - quoteChar - string of one or more characters defining the quote delimiting string
+ - escChar - character to escape quotes, typically backslash (default=C{None})
+ - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
+ - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
+ - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
+ - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
+ - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
+
+ Example::
+ qs = QuotedString('"')
+ print(qs.searchString('lsjdf "This is the quote" sldjf'))
+ complex_qs = QuotedString('{{', endQuoteChar='}}')
+ print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+ sql_qs = QuotedString('"', escQuote='""')
+ print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+ prints::
+ [['This is the quote']]
+ [['This is the "quote"']]
+ [['This is the quote with "embedded" quotes']]
+ """
+ def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+ super(QuotedString,self).__init__()
+
+        # remove white space from quote chars - won't work anyway
+ quoteChar = quoteChar.strip()
+ if not quoteChar:
+ warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+ raise SyntaxError()
+
+ if endQuoteChar is None:
+ endQuoteChar = quoteChar
+ else:
+ endQuoteChar = endQuoteChar.strip()
+ if not endQuoteChar:
+ warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+ raise SyntaxError()
+
+ self.quoteChar = quoteChar
+ self.quoteCharLen = len(quoteChar)
+ self.firstQuoteChar = quoteChar[0]
+ self.endQuoteChar = endQuoteChar
+ self.endQuoteCharLen = len(endQuoteChar)
+ self.escChar = escChar
+ self.escQuote = escQuote
+ self.unquoteResults = unquoteResults
+ self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
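+        # build a single regex for the whole quoted string: the opening quote,
+        # then body characters that are not an end-quote or escape char (nor a
+        # newline/CR unless multiline), plus alternatives for escaped quotes
+        # and escaped characters, then the closing quote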
+ if multiline:
+ self.flags = re.MULTILINE | re.DOTALL
+ self.pattern = r'%s(?:[^%s%s]' % \
+ ( re.escape(self.quoteChar),
+ _escapeRegexRangeChars(self.endQuoteChar[0]),
+ (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+ else:
+ self.flags = 0
+ self.pattern = r'%s(?:[^%s\n\r%s]' % \
+ ( re.escape(self.quoteChar),
+ _escapeRegexRangeChars(self.endQuoteChar[0]),
+ (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+ if len(self.endQuoteChar) > 1:
+ self.pattern += (
+ '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+ _escapeRegexRangeChars(self.endQuoteChar[i]))
+ for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+ )
+ if escQuote:
+ self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+ if escChar:
+ self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+ self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+ self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+ try:
+ self.re = re.compile(self.pattern, self.flags)
+ self.reString = self.pattern
+ except sre_constants.error:
+ warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+ SyntaxWarning, stacklevel=2)
+ raise
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ ret = result.group()
+
+ if self.unquoteResults:
+
+ # strip off quotes
+ ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+ if isinstance(ret,basestring):
+ # replace escaped whitespace
+ if '\\' in ret and self.convertWhitespaceEscapes:
+ ws_map = {
+ r'\t' : '\t',
+ r'\n' : '\n',
+ r'\f' : '\f',
+ r'\r' : '\r',
+ }
+ for wslit,wschar in ws_map.items():
+ ret = ret.replace(wslit, wschar)
+
+ # replace escaped characters
+ if self.escChar:
+                ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+ # replace escaped quotes
+ if self.escQuote:
+ ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+ return loc, ret
+
+ def __str__( self ):
+ try:
+ return super(QuotedString,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+ return self.strRepr
+
+
+class CharsNotIn(Token):
+ """
+ Token for matching words composed of characters I{not} in a given set (will
+ include whitespace in matched characters if not listed in the provided exclusion set - see example).
+    Defined with a string containing all disallowed characters, and an optional
+ minimum, maximum, and/or exact length. The default value for C{min} is 1 (a
+ minimum value < 1 is not valid); the default values for C{max} and C{exact}
+ are 0, meaning no maximum or exact length restriction.
+
+ Example::
+ # define a comma-separated-value as anything that is not a ','
+ csv_value = CharsNotIn(',')
+ print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+ prints::
+ ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+ """
+ def __init__( self, notChars, min=1, max=0, exact=0 ):
+ super(CharsNotIn,self).__init__()
+ self.skipWhitespace = False
+ self.notChars = notChars
+
+ if min < 1:
+ raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = ( self.minLen == 0 )
+ self.mayIndexError = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[loc] in self.notChars:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ start = loc
+ loc += 1
+ notchars = self.notChars
+ maxlen = min( start+self.maxLen, len(instring) )
+ while loc < maxlen and \
+ (instring[loc] not in notchars):
+ loc += 1
+
+ if loc - start < self.minLen:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+ def __str__( self ):
+ try:
+ return super(CharsNotIn, self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ if len(self.notChars) > 4:
+ self.strRepr = "!W:(%s...)" % self.notChars[:4]
+ else:
+ self.strRepr = "!W:(%s)" % self.notChars
+
+ return self.strRepr
+
+class White(Token):
+ """
+ Special matching class for matching whitespace. Normally, whitespace is ignored
+ by pyparsing grammars. This class is included when some whitespace structures
+ are significant. Define with a string containing the whitespace characters to be
+ matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments,
+ as defined for the C{L{Word}} class.
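+
+    Example (a small sketch; capture the run of spaces between two words)::
+        wd = Word(alphas)
+        (wd + White(" ") + wd).parseString("ab   cd")  # -> ['ab', '   ', 'cd']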
+ """
+ whiteStrs = {
+ " " : "<SPC>",
+ "\t": "<TAB>",
+ "\n": "<LF>",
+ "\r": "<CR>",
+ "\f": "<FF>",
+ }
+ def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+ super(White,self).__init__()
+ self.matchWhite = ws
+ self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+ #~ self.leaveWhitespace()
+ self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+ self.mayReturnEmpty = True
+ self.errmsg = "Expected " + self.name
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if not(instring[ loc ] in self.matchWhite):
+ raise ParseException(instring, loc, self.errmsg, self)
+ start = loc
+ loc += 1
+ maxloc = start + self.maxLen
+ maxloc = min( maxloc, len(instring) )
+ while loc < maxloc and instring[loc] in self.matchWhite:
+ loc += 1
+
+ if loc - start < self.minLen:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+
+class _PositionToken(Token):
+ def __init__( self ):
+ super(_PositionToken,self).__init__()
+ self.name=self.__class__.__name__
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+ """
+ Token to advance to a specific column of input text; useful for tabular report scraping.
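+
+    Example (an illustrative sketch; assumes the value always starts in column 11)::
+        row = Word(alphas) + GoToColumn(11) + Word(nums)
+        # note: the text skipped over to reach the column is returned as a token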
+ """
+ def __init__( self, colno ):
+ super(GoToColumn,self).__init__()
+ self.col = colno
+
+ def preParse( self, instring, loc ):
+ if col(loc,instring) != self.col:
+ instrlen = len(instring)
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+ while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+ loc += 1
+ return loc
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ thiscol = col( loc, instring )
+ if thiscol > self.col:
+ raise ParseException( instring, loc, "Text not in expected column", self )
+ newloc = loc + self.col - thiscol
+ ret = instring[ loc: newloc ]
+ return newloc, ret
+
+
+class LineStart(_PositionToken):
+ """
+ Matches if current position is at the beginning of a line within the parse string
+
+ Example::
+
+ test = '''\
+ AAA this line
+ AAA and this line
+ AAA but not this one
+ B AAA and definitely not this one
+ '''
+
+ for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+ print(t)
+
+ Prints::
+ ['AAA', ' this line']
+ ['AAA', ' and this line']
+
+ """
+ def __init__( self ):
+ super(LineStart,self).__init__()
+ self.errmsg = "Expected start of line"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if col(loc, instring) == 1:
+ return loc, []
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+ """
+ Matches if current position is at the end of a line within the parse string
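+
+    Example (a sketch; require the match to end at a line break)::
+        last_word = Word(alphas) + LineEnd().suppress()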
+ """
+ def __init__( self ):
+ super(LineEnd,self).__init__()
+ self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+ self.errmsg = "Expected end of line"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc<len(instring):
+ if instring[loc] == "\n":
+ return loc+1, "\n"
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+ elif loc == len(instring):
+ return loc+1, []
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class StringStart(_PositionToken):
+ """
+ Matches if current position is at the beginning of the parse string
+ """
+ def __init__( self ):
+ super(StringStart,self).__init__()
+ self.errmsg = "Expected start of text"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc != 0:
+ # see if entire string up to here is just whitespace and ignoreables
+ if loc != self.preParse( instring, 0 ):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class StringEnd(_PositionToken):
+ """
+ Matches if current position is at the end of the parse string
+ """
+ def __init__( self ):
+ super(StringEnd,self).__init__()
+ self.errmsg = "Expected end of text"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc < len(instring):
+ raise ParseException(instring, loc, self.errmsg, self)
+ elif loc == len(instring):
+ return loc+1, []
+ elif loc > len(instring):
+ return loc, []
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class WordStart(_PositionToken):
+ """
+ Matches if the current position is at the beginning of a Word, and
+ is not preceded by any character in a given set of C{wordChars}
+ (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+ use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
+ the string being parsed, or at the beginning of a line.
+ """
+ def __init__(self, wordChars = printables):
+ super(WordStart,self).__init__()
+ self.wordChars = set(wordChars)
+ self.errmsg = "Not at the start of a word"
+
+ def parseImpl(self, instring, loc, doActions=True ):
+ if loc != 0:
+ if (instring[loc-1] in self.wordChars or
+ instring[loc] not in self.wordChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class WordEnd(_PositionToken):
+ """
+ Matches if the current position is at the end of a Word, and
+ is not followed by any character in a given set of C{wordChars}
+ (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+ use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
+ the string being parsed, or at the end of a line.
+ """
+ def __init__(self, wordChars = printables):
+ super(WordEnd,self).__init__()
+ self.wordChars = set(wordChars)
+ self.skipWhitespace = False
+ self.errmsg = "Not at the end of a word"
+
+ def parseImpl(self, instring, loc, doActions=True ):
+ instrlen = len(instring)
+ if instrlen>0 and loc<instrlen:
+ if (instring[loc] in self.wordChars or
+ instring[loc-1] not in self.wordChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+
+class ParseExpression(ParserElement):
+ """
+ Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(ParseExpression,self).__init__(savelist)
+ if isinstance( exprs, _generatorType ):
+ exprs = list(exprs)
+
+ if isinstance( exprs, basestring ):
+ self.exprs = [ ParserElement._literalStringClass( exprs ) ]
+ elif isinstance( exprs, collections.Iterable ):
+ exprs = list(exprs)
+ # if sequence of strings provided, wrap with Literal
+ if all(isinstance(expr, basestring) for expr in exprs):
+ exprs = map(ParserElement._literalStringClass, exprs)
+ self.exprs = list(exprs)
+ else:
+ try:
+ self.exprs = list( exprs )
+ except TypeError:
+ self.exprs = [ exprs ]
+ self.callPreparse = False
+
+ def __getitem__( self, i ):
+ return self.exprs[i]
+
+ def append( self, other ):
+ self.exprs.append( other )
+ self.strRepr = None
+ return self
+
+ def leaveWhitespace( self ):
+ """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
+ all contained expressions."""
+ self.skipWhitespace = False
+ self.exprs = [ e.copy() for e in self.exprs ]
+ for e in self.exprs:
+ e.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseExpression, self).ignore( other )
+ for e in self.exprs:
+ e.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseExpression, self).ignore( other )
+ for e in self.exprs:
+ e.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def __str__( self ):
+ try:
+ return super(ParseExpression,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
+ return self.strRepr
+
+ def streamline( self ):
+ super(ParseExpression,self).streamline()
+
+ for e in self.exprs:
+ e.streamline()
+
+ # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
+ # but only if there are no parse actions or resultsNames on the nested And's
+ # (likewise for Or's and MatchFirst's)
+ if ( len(self.exprs) == 2 ):
+ other = self.exprs[0]
+ if ( isinstance( other, self.__class__ ) and
+ not(other.parseAction) and
+ other.resultsName is None and
+ not other.debug ):
+ self.exprs = other.exprs[:] + [ self.exprs[1] ]
+ self.strRepr = None
+ self.mayReturnEmpty |= other.mayReturnEmpty
+ self.mayIndexError |= other.mayIndexError
+
+ other = self.exprs[-1]
+ if ( isinstance( other, self.__class__ ) and
+ not(other.parseAction) and
+ other.resultsName is None and
+ not other.debug ):
+ self.exprs = self.exprs[:-1] + other.exprs[:]
+ self.strRepr = None
+ self.mayReturnEmpty |= other.mayReturnEmpty
+ self.mayIndexError |= other.mayIndexError
+
+ self.errmsg = "Expected " + _ustr(self)
+
+ return self
+
+ def setResultsName( self, name, listAllMatches=False ):
+ ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
+ return ret
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ for e in self.exprs:
+ e.validate(tmp)
+ self.checkRecursion( [] )
+
+ def copy(self):
+ ret = super(ParseExpression,self).copy()
+ ret.exprs = [e.copy() for e in self.exprs]
+ return ret
+
+class And(ParseExpression):
+ """
+ Requires all given C{ParseExpression}s to be found in the given order.
+ Expressions may be separated by whitespace.
+ May be constructed using the C{'+'} operator.
+ May also be constructed using the C{'-'} operator, which will suppress backtracking.
+
+ Example::
+ integer = Word(nums)
+ name_expr = OneOrMore(Word(alphas))
+
+ expr = And([integer("id"),name_expr("name"),integer("age")])
+ # more easily written as:
+ expr = integer("id") + name_expr("name") + integer("age")
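+
+        # an illustrative sketch of the '-' operator mentioned above: once
+        # "id" has matched, a failure in any later element raises a
+        # ParseSyntaxException instead of allowing backtracking
+        expr = integer("id") - name_expr("name") + integer("age")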
+ """
+
+ class _ErrorStop(Empty):
+ def __init__(self, *args, **kwargs):
+ super(And._ErrorStop,self).__init__(*args, **kwargs)
+ self.name = '-'
+ self.leaveWhitespace()
+
+ def __init__( self, exprs, savelist = True ):
+ super(And,self).__init__(exprs, savelist)
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ self.setWhitespaceChars( self.exprs[0].whiteChars )
+ self.skipWhitespace = self.exprs[0].skipWhitespace
+ self.callPreparse = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ # pass False as last arg to _parse for first element, since we already
+ # pre-parsed the string as part of our And pre-parsing
+ loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
+ errorStop = False
+ for e in self.exprs[1:]:
+ if isinstance(e, And._ErrorStop):
+ errorStop = True
+ continue
+ if errorStop:
+ try:
+ loc, exprtokens = e._parse( instring, loc, doActions )
+ except ParseSyntaxException:
+ raise
+ except ParseBaseException as pe:
+ pe.__traceback__ = None
+ raise ParseSyntaxException._from_exception(pe)
+ except IndexError:
+ raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
+ else:
+ loc, exprtokens = e._parse( instring, loc, doActions )
+ if exprtokens or exprtokens.haskeys():
+ resultlist += exprtokens
+ return loc, resultlist
+
+ def __iadd__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #And( [ self, other ] )
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+ if not e.mayReturnEmpty:
+ break
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+
+class Or(ParseExpression):
+ """
+ Requires that at least one C{ParseExpression} is found.
+ If two expressions match, the expression that matches the longest string will be used.
+ May be constructed using the C{'^'} operator.
+
+ Example::
+ # construct Or using '^' operator
+
+ number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+ print(number.searchString("123 3.1416 789"))
+ prints::
+ [['123'], ['3.1416'], ['789']]
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(Or,self).__init__(exprs, savelist)
+ if self.exprs:
+ self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+ else:
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ maxExcLoc = -1
+ maxException = None
+ matches = []
+ for e in self.exprs:
+ try:
+ loc2 = e.tryParse( instring, loc )
+ except ParseException as err:
+ err.__traceback__ = None
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+ except IndexError:
+ if len(instring) > maxExcLoc:
+ maxException = ParseException(instring,len(instring),e.errmsg,self)
+ maxExcLoc = len(instring)
+ else:
+ # save match among all matches, to retry longest to shortest
+ matches.append((loc2, e))
+
+ if matches:
+ matches.sort(key=lambda x: -x[0])
+ for _,e in matches:
+ try:
+ return e._parse( instring, loc, doActions )
+ except ParseException as err:
+ err.__traceback__ = None
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+
+ if maxException is not None:
+ maxException.msg = self.errmsg
+ raise maxException
+ else:
+ raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+ def __ixor__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #Or( [ self, other ] )
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+ """
+ Requires that at least one C{ParseExpression} is found.
+ If two expressions match, the first one listed is the one that will match.
+ May be constructed using the C{'|'} operator.
+
+ Example::
+ # construct MatchFirst using '|' operator
+
+ # watch the order of expressions to match
+ number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+ print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+ # put more selective expression first
+ number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+ print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']]
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(MatchFirst,self).__init__(exprs, savelist)
+ if self.exprs:
+ self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+ else:
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ maxExcLoc = -1
+ maxException = None
+ for e in self.exprs:
+ try:
+ ret = e._parse( instring, loc, doActions )
+ return ret
+ except ParseException as err:
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+ except IndexError:
+ if len(instring) > maxExcLoc:
+ maxException = ParseException(instring,len(instring),e.errmsg,self)
+ maxExcLoc = len(instring)
+
+ # only got here if no expression matched, raise exception for match that made it the furthest
+ else:
+ if maxException is not None:
+ maxException.msg = self.errmsg
+ raise maxException
+ else:
+ raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+ def __ior__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #MatchFirst( [ self, other ] )
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+ """
+ Requires all given C{ParseExpression}s to be found, but in any order.
+ Expressions may be separated by whitespace.
+ May be constructed using the C{'&'} operator.
+
+ Example::
+ color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+ shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+ integer = Word(nums)
+ shape_attr = "shape:" + shape_type("shape")
+ posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+ color_attr = "color:" + color("color")
+ size_attr = "size:" + integer("size")
+
+ # use Each (using operator '&') to accept attributes in any order
+ # (shape and posn are required, color and size are optional)
+ shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+ shape_spec.runTests('''
+ shape: SQUARE color: BLACK posn: 100, 120
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ color:GREEN size:20 shape:TRIANGLE posn:20,40
+ '''
+ )
+ prints::
+ shape: SQUARE color: BLACK posn: 100, 120
+ ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+ - color: BLACK
+ - posn: ['100', ',', '120']
+ - x: 100
+ - y: 120
+ - shape: SQUARE
+
+
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+ - color: BLUE
+ - posn: ['50', ',', '80']
+ - x: 50
+ - y: 80
+ - shape: CIRCLE
+ - size: 50
+
+
+ color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+ ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+ - color: GREEN
+ - posn: ['20', ',', '40']
+ - x: 20
+ - y: 40
+ - shape: TRIANGLE
+ - size: 20
+ """
+ def __init__( self, exprs, savelist = True ):
+ super(Each,self).__init__(exprs, savelist)
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ self.skipWhitespace = True
+ self.initExprGroups = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.initExprGroups:
+ self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+ opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+ opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+ self.optionals = opt1 + opt2
+ self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+ self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+ self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+ self.required += self.multirequired
+ self.initExprGroups = False
+ tmpLoc = loc
+ tmpReqd = self.required[:]
+ tmpOpt = self.optionals[:]
+ matchOrder = []
+
+ keepMatching = True
+ while keepMatching:
+ tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+ failed = []
+ for e in tmpExprs:
+ try:
+ tmpLoc = e.tryParse( instring, tmpLoc )
+ except ParseException:
+ failed.append(e)
+ else:
+ matchOrder.append(self.opt1map.get(id(e),e))
+ if e in tmpReqd:
+ tmpReqd.remove(e)
+ elif e in tmpOpt:
+ tmpOpt.remove(e)
+ if len(failed) == len(tmpExprs):
+ keepMatching = False
+
+ if tmpReqd:
+ missing = ", ".join(_ustr(e) for e in tmpReqd)
+ raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+ # add any unmatched Optionals, in case they have default values defined
+ matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+ resultlist = []
+ for e in matchOrder:
+ loc,results = e._parse(instring,loc,doActions)
+ resultlist.append(results)
+
+ finalResults = sum(resultlist, ParseResults([]))
+ return loc, finalResults
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+ """
+ Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(ParseElementEnhance,self).__init__(savelist)
+ if isinstance( expr, basestring ):
+ if issubclass(ParserElement._literalStringClass, Token):
+ expr = ParserElement._literalStringClass(expr)
+ else:
+ expr = ParserElement._literalStringClass(Literal(expr))
+ self.expr = expr
+ self.strRepr = None
+ if expr is not None:
+ self.mayIndexError = expr.mayIndexError
+ self.mayReturnEmpty = expr.mayReturnEmpty
+ self.setWhitespaceChars( expr.whiteChars )
+ self.skipWhitespace = expr.skipWhitespace
+ self.saveAsList = expr.saveAsList
+ self.callPreparse = expr.callPreparse
+ self.ignoreExprs.extend(expr.ignoreExprs)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr is not None:
+ return self.expr._parse( instring, loc, doActions, callPreParse=False )
+ else:
+ raise ParseException("",loc,self.errmsg,self)
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ self.expr = self.expr.copy()
+ if self.expr is not None:
+ self.expr.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def streamline( self ):
+ super(ParseElementEnhance,self).streamline()
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ if self in parseElementList:
+ raise RecursiveGrammarException( parseElementList+[self] )
+ subRecCheckList = parseElementList[:] + [ self ]
+ if self.expr is not None:
+ self.expr.checkRecursion( subRecCheckList )
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion( [] )
+
+ def __str__( self ):
+ try:
+ return super(ParseElementEnhance,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None and self.expr is not None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+ return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+ """
+ Lookahead matching of the given parse expression. C{FollowedBy}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression matches at the current
+ position. C{FollowedBy} always returns a null token list.
+
+ Example::
+ # use FollowedBy to match a label only if it is followed by a ':'
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+ prints::
+ [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+ """
+ def __init__( self, expr ):
+ super(FollowedBy,self).__init__(expr)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self.expr.tryParse( instring, loc )
+ return loc, []
+
+
+class NotAny(ParseElementEnhance):
+ """
+ Lookahead to disallow matching with the given parse expression. C{NotAny}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression does I{not} match at the current
+ position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+ always returns a null token list. May be constructed using the '~' operator.
+
+ Example::
+
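+        # a minimal illustrative sketch (the keyword names are made up):
+        # use '~' (which constructs a NotAny) to keep keywords from
+        # matching as ordinary identifiers
+        AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+        ident = ~(AND | OR | NOT) + Word(alphas)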
+ """
+ def __init__( self, expr ):
+ super(NotAny,self).__init__(expr)
+ #~ self.leaveWhitespace()
+ self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+ self.mayReturnEmpty = True
+ self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr.canParseNext(instring, loc):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+ return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+ def __init__( self, expr, stopOn=None):
+ super(_MultipleMatch, self).__init__(expr)
+ self.saveAsList = True
+ ender = stopOn
+ if isinstance(ender, basestring):
+ ender = ParserElement._literalStringClass(ender)
+ self.not_ender = ~ender if ender is not None else None
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self_expr_parse = self.expr._parse
+ self_skip_ignorables = self._skipIgnorables
+ check_ender = self.not_ender is not None
+ if check_ender:
+ try_not_ender = self.not_ender.tryParse
+
+ # must be at least one (but first see if we are the stopOn sentinel;
+ # if so, fail)
+ if check_ender:
+ try_not_ender(instring, loc)
+ loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+ try:
+ hasIgnoreExprs = (not not self.ignoreExprs)
+ while 1:
+ if check_ender:
+ try_not_ender(instring, loc)
+ if hasIgnoreExprs:
+ preloc = self_skip_ignorables( instring, loc )
+ else:
+ preloc = loc
+ loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+ if tmptokens or tmptokens.haskeys():
+ tokens += tmptokens
+ except (ParseException,IndexError):
+ pass
+
+ return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+ """
+ Repetition of one or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match one or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example::
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: BLACK"
+ OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+ # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+ # could also be written as
+ (attr_expr * (1,)).parseString(text).pprint()
+ """
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+ return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+ """
+ Optional repetition of zero or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match zero or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example: similar to L{OneOrMore}
+ """
+ def __init__( self, expr, stopOn=None):
+ super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+ except (ParseException,IndexError):
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+ return self.strRepr
+
+class _NullToken(object):
+ def __bool__(self):
+ return False
+ __nonzero__ = __bool__
+ def __str__(self):
+ return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+ """
+ Optional matching of the given expression.
+
+ Parameters:
+ - expr - expression that must match zero or more times
+ - default (optional) - value to be returned if the optional expression is not found.
+
+ Example::
+ # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+ zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+ zip.runTests('''
+ # traditional ZIP code
+ 12345
+
+ # ZIP+4 form
+ 12101-0001
+
+ # invalid ZIP
+ 98765-
+ ''')
+ prints::
+ # traditional ZIP code
+ 12345
+ ['12345']
+
+ # ZIP+4 form
+ 12101-0001
+ ['12101-0001']
+
+ # invalid ZIP
+ 98765-
+ ^
+ FAIL: Expected end of text (at char 5), (line:1, col:6)
+ """
+ def __init__( self, expr, default=_optionalNotMatched ):
+ super(Optional,self).__init__( expr, savelist=False )
+ self.saveAsList = self.expr.saveAsList
+ self.defaultValue = default
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+ except (ParseException,IndexError):
+ if self.defaultValue is not _optionalNotMatched:
+ if self.expr.resultsName:
+ tokens = ParseResults([ self.defaultValue ])
+ tokens[self.expr.resultsName] = self.defaultValue
+ else:
+ tokens = [ self.defaultValue ]
+ else:
+ tokens = []
+ return loc, tokens
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]"
+
+ return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+ """
+ Token for skipping over all undefined text until the matched expression is found.
+
+ Parameters:
+ - expr - target expression marking the end of the data to be skipped
+ - include - (default=C{False}) if True, the target expression is also parsed
+ (the skipped text and target expression are returned as a 2-element list).
+ - ignore - (default=C{None}) used to define grammars (typically quoted strings and
+ comments) that might contain false matches to the target expression
+ - failOn - (default=C{None}) define expressions that are not allowed to be
+       included in the skipped text; if found before the target expression is found,
+ the SkipTo is not a match
+
+ Example::
+ report = '''
+ Outstanding Issues Report - 1 Jan 2000
+
+ # | Severity | Description | Days Open
+ -----+----------+-------------------------------------------+-----------
+ 101 | Critical | Intermittent system crash | 6
+ 94 | Cosmetic | Spelling error on Login ('log|n') | 14
+ 79 | Minor | System slow when running too many reports | 47
+ '''
+ integer = Word(nums)
+ SEP = Suppress('|')
+ # use SkipTo to simply match everything up until the next SEP
+ # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+ # - parse action will call token.strip() for each matched token, i.e., the description body
+ string_data = SkipTo(SEP, ignore=quotedString)
+ string_data.setParseAction(tokenMap(str.strip))
+ ticket_expr = (integer("issue_num") + SEP
+ + string_data("sev") + SEP
+ + string_data("desc") + SEP
+ + integer("days_open"))
+
+ for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+ prints::
+ ['101', 'Critical', 'Intermittent system crash', '6']
+ - days_open: 6
+ - desc: Intermittent system crash
+ - issue_num: 101
+ - sev: Critical
+ ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+ - days_open: 14
+ - desc: Spelling error on Login ('log|n')
+ - issue_num: 94
+ - sev: Cosmetic
+ ['79', 'Minor', 'System slow when running too many reports', '47']
+ - days_open: 47
+ - desc: System slow when running too many reports
+ - issue_num: 79
+ - sev: Minor
+ """
+ def __init__( self, other, include=False, ignore=None, failOn=None ):
+ super( SkipTo, self ).__init__( other )
+ self.ignoreExpr = ignore
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.includeMatch = include
+ self.asList = False
+ if isinstance(failOn, basestring):
+ self.failOn = ParserElement._literalStringClass(failOn)
+ else:
+ self.failOn = failOn
+ self.errmsg = "No match found for "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ startloc = loc
+ instrlen = len(instring)
+ expr = self.expr
+ expr_parse = self.expr._parse
+ self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+ self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+ tmploc = loc
+ while tmploc <= instrlen:
+ if self_failOn_canParseNext is not None:
+ # break if failOn expression matches
+ if self_failOn_canParseNext(instring, tmploc):
+ break
+
+ if self_ignoreExpr_tryParse is not None:
+ # advance past ignore expressions
+ while 1:
+ try:
+ tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+ except ParseBaseException:
+ break
+
+ try:
+ expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+ except (ParseException, IndexError):
+ # no match, advance loc in string
+ tmploc += 1
+ else:
+ # matched skipto expr, done
+ break
+
+ else:
+ # ran off the end of the input string without matching skipto expr, fail
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ # build up return values
+ loc = tmploc
+ skiptext = instring[startloc:loc]
+ skipresult = ParseResults(skiptext)
+
+ if self.includeMatch:
+ loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+ skipresult += mat
+
+ return loc, skipresult
+
+class Forward(ParseElementEnhance):
+ """
+ Forward declaration of an expression to be defined later -
+ used for recursive grammars, such as algebraic infix notation.
+ When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+ Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+ Specifically, '|' has a lower precedence than '<<', so that::
+ fwdExpr << a | b | c
+ will actually be evaluated as::
+ (fwdExpr << a) | b | c
+ thereby leaving b and c out as parseable alternatives. It is recommended that you
+ explicitly group the values inserted into the C{Forward}::
+ fwdExpr << (a | b | c)
+ Converting to use the '<<=' operator instead will avoid this problem.
+
+ See L{ParseResults.pprint} for an example of a recursive parser created using
+ C{Forward}.
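+
+    Example (a minimal illustrative sketch of a recursive grammar)::
+        LPAR, RPAR = map(Suppress, "()")
+        nested = Forward()
+        # '<<=' assigns the full right-hand side, sidestepping the
+        # '<<' operator-precedence pitfall described above
+        nested <<= Word(alphas) | Group(LPAR + ZeroOrMore(nested) + RPAR)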
+ """
+ def __init__( self, other=None ):
+ super(Forward,self).__init__( other, savelist=False )
+
+ def __lshift__( self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass(other)
+ self.expr = other
+ self.strRepr = None
+ self.mayIndexError = self.expr.mayIndexError
+ self.mayReturnEmpty = self.expr.mayReturnEmpty
+ self.setWhitespaceChars( self.expr.whiteChars )
+ self.skipWhitespace = self.expr.skipWhitespace
+ self.saveAsList = self.expr.saveAsList
+ self.ignoreExprs.extend(self.expr.ignoreExprs)
+ return self
+
+ def __ilshift__(self, other):
+ return self << other
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ return self
+
+ def streamline( self ):
+ if not self.streamlined:
+ self.streamlined = True
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def validate( self, validateTrace=[] ):
+ if self not in validateTrace:
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion([])
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+ return self.__class__.__name__ + ": ..."
+
+ # stubbed out for now - creates awful memory and perf issues
+ self._revertClass = self.__class__
+ self.__class__ = _ForwardNoRecurse
+ try:
+ if self.expr is not None:
+ retString = _ustr(self.expr)
+ else:
+ retString = "None"
+ finally:
+ self.__class__ = self._revertClass
+ return self.__class__.__name__ + ": " + retString
+
+ def copy(self):
+ if self.expr is not None:
+ return super(Forward,self).copy()
+ else:
+ ret = Forward()
+ ret <<= self
+ return ret
+
+class _ForwardNoRecurse(Forward):
+ def __str__( self ):
+ return "..."
+
+class TokenConverter(ParseElementEnhance):
+ """
+    Abstract subclass of C{ParseElementEnhance}, for converting parsed results.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(TokenConverter,self).__init__( expr )#, savelist )
+ self.saveAsList = False
+
+class Combine(TokenConverter):
+ """
+ Converter to concatenate all matching tokens to a single string.
+ By default, the matching patterns must also be contiguous in the input string;
+ this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+ Example::
+ real = Word(nums) + '.' + Word(nums)
+ print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+ # will also erroneously match the following
+ print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+ real = Combine(Word(nums) + '.' + Word(nums))
+ print(real.parseString('3.1416')) # -> ['3.1416']
+ # no match when there are internal spaces
+ print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+ """
+ def __init__( self, expr, joinString="", adjacent=True ):
+ super(Combine,self).__init__( expr )
+ # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+ if adjacent:
+ self.leaveWhitespace()
+ self.adjacent = adjacent
+ self.skipWhitespace = True
+ self.joinString = joinString
+ self.callPreparse = True
+
+ def ignore( self, other ):
+ if self.adjacent:
+ ParserElement.ignore(self, other)
+ else:
+ super( Combine, self).ignore( other )
+ return self
+
+ def postParse( self, instring, loc, tokenlist ):
+ retToks = tokenlist.copy()
+ del retToks[:]
+ retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+ if self.resultsName and retToks.haskeys():
+ return [ retToks ]
+ else:
+ return retToks
+
+class Group(TokenConverter):
+ """
+ Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+ Example::
+ ident = Word(alphas)
+ num = Word(nums)
+ term = ident | num
+ func = ident + Optional(delimitedList(term))
+ print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100']
+
+ func = ident + Group(Optional(delimitedList(term)))
+ print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']]
+ """
+ def __init__( self, expr ):
+ super(Group,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ return [ tokenlist ]
+
+class Dict(TokenConverter):
+ """
+ Converter to return a repetitive expression as a list, but also as a dictionary.
+ Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as an item key.
+
+ Example::
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ # print attributes as plain groups
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+ result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+ print(result.dump())
+
+ # access named fields as dict entries, or output as dict
+ print(result['shape'])
+ print(result.asDict())
+ prints::
+ ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+ See more examples at L{ParseResults} of accessing fields by results name.
+ """
+ def __init__( self, expr ):
+ super(Dict,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ for i,tok in enumerate(tokenlist):
+ if len(tok) == 0:
+ continue
+ ikey = tok[0]
+ if isinstance(ikey,int):
+ ikey = _ustr(tok[0]).strip()
+ if len(tok)==1:
+ tokenlist[ikey] = _ParseResultsWithOffset("",i)
+ elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+ tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+ else:
+ dictvalue = tok.copy() #ParseResults(i)
+ del dictvalue[0]
+ if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+ else:
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+ if self.resultsName:
+ return [ tokenlist ]
+ else:
+ return tokenlist
+
+
+class Suppress(TokenConverter):
+ """
+ Converter for ignoring the results of a parsed expression.
+
+ Example::
+ source = "a, b, c,d"
+ wd = Word(alphas)
+ wd_list1 = wd + ZeroOrMore(',' + wd)
+ print(wd_list1.parseString(source))
+
+ # often, delimiters that are useful during parsing are just in the
+ # way afterward - use Suppress to keep them out of the parsed output
+ wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+ print(wd_list2.parseString(source))
+ prints::
+ ['a', ',', 'b', ',', 'c', ',', 'd']
+ ['a', 'b', 'c', 'd']
+ (See also L{delimitedList}.)
+ """
+ def postParse( self, instring, loc, tokenlist ):
+ return []
+
+ def suppress( self ):
+ return self
+
+
+class OnlyOnce(object):
+ """
+ Wrapper for parse actions, to ensure they are only called once.
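+
+    Example (a minimal illustrative sketch; C{announce} is a made-up parse action)::
+        def announce(tokens):
+            print("first match only:", tokens)
+        wd = Word(alphas).setParseAction(OnlyOnce(announce))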
+ """
+ def __init__(self, methodCall):
+ self.callable = _trim_arity(methodCall)
+ self.called = False
+ def __call__(self,s,l,t):
+ if not self.called:
+ results = self.callable(s,l,t)
+ self.called = True
+ return results
+ raise ParseException(s,l,"")
+ def reset(self):
+ self.called = False
+
+def traceParseAction(f):
+ """
+ Decorator for debugging parse actions.
+
+    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})"}.
+ When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+ Example::
+ wd = Word(alphas)
+
+ @traceParseAction
+ def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+ wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+ print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+ prints::
+ >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+ <<leaving remove_duplicate_chars (ret: 'dfjkls')
+ ['dfjkls']
+ """
+ f = _trim_arity(f)
+ def z(*paArgs):
+ thisFunc = f.__name__
+ s,l,t = paArgs[-3:]
+ if len(paArgs)>3:
+ thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+ sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+ try:
+ ret = f(*paArgs)
+ except Exception as exc:
+ sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+ raise
+ sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+ return ret
+ try:
+ z.__name__ = f.__name__
+ except AttributeError:
+ pass
+ return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+ """
+ Helper to define a delimited list of expressions - the delimiter defaults to ','.
+    By default, the list elements and delimiters can have intervening whitespace and
+    comments, but this can be overridden by passing C{combine=True} in the constructor.
+ If C{combine} is set to C{True}, the matching tokens are returned as a single token
+ string, with the delimiters included; otherwise, the matching tokens are returned
+ as a list of tokens, with the delimiters suppressed.
+
+ Example::
+ delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+ delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+ """
+ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+ if combine:
+ return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+ else:
+ return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+ """
+ Helper to define a counted list of expressions.
+ This helper defines a pattern of the form::
+ integer expr expr expr...
+ where the leading integer tells how many expr expressions follow.
+    The matched tokens are returned as a list of expr tokens; the leading count token is suppressed.
+
+ If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+ Example::
+ countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']
+
+ # in this parser, the leading integer value is given in binary,
+ # '10' indicating that 2 values are in the array
+ binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+ countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']
+ """
+ arrayExpr = Forward()
+ def countFieldParseAction(s,l,t):
+ n = t[0]
+ arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+ return []
+ if intExpr is None:
+ intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+ else:
+ intExpr = intExpr.copy()
+ intExpr.setName("arrayLen")
+ intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+ return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+ ret = []
+ for i in L:
+ if isinstance(i,list):
+ ret.extend(_flatten(i))
+ else:
+ ret.append(i)
+ return ret
+
+def matchPreviousLiteral(expr):
+ """
+ Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks
+ for a 'repeat' of a previous expression. For example::
+ first = Word(nums)
+ second = matchPreviousLiteral(first)
+ matchExpr = first + ":" + second
+ will match C{"1:1"}, but not C{"1:2"}. Because this matches a
+    previous literal, it will also match the leading C{"1:1"} in C{"1:10"}.
+ If this is not desired, use C{matchPreviousExpr}.
+ Do I{not} use with packrat parsing enabled.
+ """
+ rep = Forward()
+ def copyTokenToRepeater(s,l,t):
+ if t:
+ if len(t) == 1:
+ rep << t[0]
+ else:
+ # flatten t tokens
+ tflat = _flatten(t.asList())
+ rep << And(Literal(tt) for tt in tflat)
+ else:
+ rep << Empty()
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def matchPreviousExpr(expr):
+ """
+ Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks
+ for a 'repeat' of a previous expression. For example::
+ first = Word(nums)
+ second = matchPreviousExpr(first)
+ matchExpr = first + ":" + second
+ will match C{"1:1"}, but not C{"1:2"}. Because this matches by
+    expressions, it will I{not} match the leading C{"1:1"} in C{"1:10"};
+ the expressions are evaluated first, and then compared, so
+ C{"1"} is compared with C{"10"}.
+ Do I{not} use with packrat parsing enabled.
+ """
+ rep = Forward()
+ e2 = expr.copy()
+ rep <<= e2
+ def copyTokenToRepeater(s,l,t):
+ matchTokens = _flatten(t.asList())
+ def mustMatchTheseTokens(s,l,t):
+ theseTokens = _flatten(t.asList())
+ if theseTokens != matchTokens:
+ raise ParseException("",0,"")
+ rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def _escapeRegexRangeChars(s):
+ #~ escape these chars: ^-]
+ for c in r"\^-]":
+ s = s.replace(c,_bslash+c)
+ s = s.replace("\n",r"\n")
+ s = s.replace("\t",r"\t")
+ return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+ """
+ Helper to quickly define a set of alternative Literals, and makes sure to do
+ longest-first testing when there is a conflict, regardless of the input order,
+ but returns a C{L{MatchFirst}} for best performance.
+
+ Parameters:
+ - strs - a string of space-delimited literals, or a collection of string literals
+ - caseless - (default=C{False}) - treat all literals as caseless
+ - useRegex - (default=C{True}) - as an optimization, will generate a Regex
+ object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
+ if creating a C{Regex} raises an exception)
+
+ Example::
+ comp_oper = oneOf("< = > <= >= !=")
+ var = Word(alphas)
+ number = Word(nums)
+ term = var | number
+ comparison_expr = term + comp_oper + term
+ print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
+ prints::
+ [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+ """
+ if caseless:
+ isequal = ( lambda a,b: a.upper() == b.upper() )
+ masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+ parseElementClass = CaselessLiteral
+ else:
+ isequal = ( lambda a,b: a == b )
+ masks = ( lambda a,b: b.startswith(a) )
+ parseElementClass = Literal
+
+ symbols = []
+ if isinstance(strs,basestring):
+ symbols = strs.split()
+ elif isinstance(strs, collections.Iterable):
+ symbols = list(strs)
+ else:
+ warnings.warn("Invalid argument to oneOf, expected string or iterable",
+ SyntaxWarning, stacklevel=2)
+ if not symbols:
+ return NoMatch()
+
+ i = 0
+ while i < len(symbols)-1:
+ cur = symbols[i]
+ for j,other in enumerate(symbols[i+1:]):
+ if ( isequal(other, cur) ):
+ del symbols[i+j+1]
+ break
+ elif ( masks(cur, other) ):
+ del symbols[i+j+1]
+ symbols.insert(i,other)
+ cur = other
+ break
+ else:
+ i += 1
+
+ if not caseless and useRegex:
+ #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
+ try:
+ if len(symbols)==len("".join(symbols)):
+ return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ else:
+ return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ except Exception:
+ warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+ SyntaxWarning, stacklevel=2)
+
+
+ # last resort, just use MatchFirst
+ return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf( key, value ):
+ """
+ Helper to easily and clearly define a dictionary by specifying the respective patterns
+ for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
+ in the proper order. The key pattern can include delimiting markers or punctuation,
+ as long as they are suppressed, thereby leaving the significant key text. The value
+ pattern can include named results, so that the C{Dict} results can include named token
+ fields.
+
+ Example::
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ attr_label = label
+ attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+ # similar to Dict, but simpler call format
+ result = dictOf(attr_label, attr_value).parseString(text)
+ print(result.dump())
+ print(result['shape'])
+ print(result.shape) # object attribute access works too
+ print(result.asDict())
+ prints::
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ SQUARE
+ {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+ """
+ return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+ """
+ Helper to return the original, untokenized text for a given expression. Useful to
+ restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+ revert separate tokens with intervening whitespace back to the original matching
+    input text. By default, returns a string containing the original parsed text.
+
+ If the optional C{asString} argument is passed as C{False}, then the return value is a
+ C{L{ParseResults}} containing any results names that were originally matched, and a
+ single token containing the original matched text from the input string. So if
+ the expression passed to C{L{originalTextFor}} contains expressions with defined
+ results names, you must set C{asString} to C{False} if you want to preserve those
+ results name values.
+
+ Example::
+ src = "this is test <b> bold <i>text</i> </b> normal text "
+ for tag in ("b","i"):
+ opener,closer = makeHTMLTags(tag)
+ patt = originalTextFor(opener + SkipTo(closer) + closer)
+ print(patt.searchString(src)[0])
+ prints::
+ ['<b> bold <i>text</i> </b>']
+ ['<i>text</i>']
+ """
+ locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+ endlocMarker = locMarker.copy()
+ endlocMarker.callPreparse = False
+ matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+ if asString:
+ extractText = lambda s,l,t: s[t._original_start:t._original_end]
+ else:
+ def extractText(s,l,t):
+ t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+ matchExpr.setParseAction(extractText)
+ matchExpr.ignoreExprs = expr.ignoreExprs
+ return matchExpr
+
+def ungroup(expr):
+ """
+ Helper to undo pyparsing's default grouping of And expressions, even
+ if all but one are non-empty.
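+
+    Example (a minimal illustrative sketch)::
+        grouped = Group(Word(alphas))
+        print(grouped.parseString("abc"))           # -> [['abc']]
+        print(ungroup(grouped).parseString("abc"))  # -> ['abc']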
+ """
+ return TokenConverter(expr).setParseAction(lambda t:t[0])
+
+def locatedExpr(expr):
+ """
+ Helper to decorate a returned token with its starting and ending locations in the input string.
+ This helper adds the following results names:
+ - locn_start = location where matched expression begins
+ - locn_end = location where matched expression ends
+ - value = the actual parsed results
+
+    Be careful if the input text contains C{<TAB>} characters; you may want to call
+ C{L{ParserElement.parseWithTabs}}
+
+ Example::
+ wd = Word(alphas)
+ for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+ print(match)
+ prints::
+ [[0, 'ljsdf', 5]]
+ [[8, 'lksdjjf', 15]]
+ [[18, 'lkkjj', 23]]
+ """
+ locator = Empty().setParseAction(lambda s,l,t: l)
+ return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty = Empty().setName("empty")
+lineStart = LineStart().setName("lineStart")
+lineEnd = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+ r"""
+ Helper to easily define string ranges for use in Word construction. Borrows
+ syntax from regexp '[]' string range definitions::
+ srange("[0-9]") -> "0123456789"
+ srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
+ srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+ The input string must be enclosed in []'s, and the returned string is the expanded
+ character set joined into a single string.
+ The values enclosed in the []'s may be:
+ - a single character
+ - an escaped character with a leading backslash (such as C{\-} or C{\]})
+ - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)
+ (C{\0x##} is also supported for backwards compatibility)
+ - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
+ - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
+ - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
+ """
+ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+ try:
+ return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+ except Exception:
+ return ""
+
+def matchOnlyAtCol(n):
+ """
+ Helper method for defining parse actions that require matching at a specific
+ column in the input text.
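+
+    Example (a minimal illustrative sketch)::
+        # accept an integer only when it begins in column 5 of its line
+        col5_int = Word(nums).setParseAction(matchOnlyAtCol(5))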
+ """
+ def verifyCol(strg,locn,toks):
+ if col(locn,strg) != n:
+ raise ParseException(strg,locn,"matched token not at column %d" % n)
+ return verifyCol
+
+def replaceWith(replStr):
+ """
+ Helper method for common parse actions that simply return a literal value. Especially
+ useful when used with C{L{transformString<ParserElement.transformString>}()}.
+
+ Example::
+ num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+ term = na | num
+
+ OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+ """
+ return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+ """
+ Helper parse action for removing quotation marks from parsed quoted strings.
+
+ Example::
+ # by default, quotation marks are included in parsed results
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+ # use removeQuotes to strip quotation marks from parsed results
+ quotedString.setParseAction(removeQuotes)
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+ """
+ return t[0][1:-1]
+
+def tokenMap(func, *args):
+ """
+    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
+ args are passed, they are forwarded to the given function as additional arguments after
+ the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+ parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in L{ParserElement.transformString})::
+ hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+ hex_ints.runTests('''
+ 00 11 22 aa FF 0a 0d 1a
+ ''')
+
+ upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+ OneOrMore(upperword).runTests('''
+ my kingdom for a horse
+ ''')
+
+ wd = Word(alphas).setParseAction(tokenMap(str.title))
+ OneOrMore(wd).setParseAction(' '.join).runTests('''
+ now is the winter of our discontent made glorious summer by this sun of york
+ ''')
+ prints::
+ 00 11 22 aa FF 0a 0d 1a
+ [0, 17, 34, 170, 255, 10, 13, 26]
+
+ my kingdom for a horse
+ ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+ now is the winter of our discontent made glorious summer by this sun of york
+ ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+ """
+ def pa(s,l,t):
+ return [func(tokn, *args) for tokn in t]
+
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ pa.__name__ = func_name
+
+ return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+
+def _makeTags(tagStr, xml):
+ """Internal helper to construct opening and closing tag expressions, given a tag name"""
+ if isinstance(tagStr,basestring):
+ resname = tagStr
+ tagStr = Keyword(tagStr, caseless=not xml)
+ else:
+ resname = tagStr.name
+
+ tagAttrName = Word(alphas,alphanums+"_-:")
+ if (xml):
+ tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+ openTag = Suppress("<") + tagStr("tag") + \
+ Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+ else:
+ printablesLessRAbrack = "".join(c for c in printables if c not in ">")
+ tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+ openTag = Suppress("<") + tagStr("tag") + \
+ Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+ Optional( Suppress("=") + tagAttrValue ) ))) + \
+ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+ closeTag = Combine(_L("</") + tagStr + ">")
+
+ openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
+ closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
+ openTag.tag = resname
+ closeTag.tag = resname
+ return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+ """
+ Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
+ tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
+
+ Example::
+ text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+ # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
+ a,a_end = makeHTMLTags("A")
+ link_expr = a + SkipTo(a_end)("link_text") + a_end
+
+ for link in link_expr.searchString(text):
+ # attributes in the <A> tag (like "href" shown here) are also accessible as named results
+ print(link.link_text, '->', link.href)
+ prints::
+ pyparsing -> http://pyparsing.wikispaces.com
+ """
+ return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+ """
+ Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
+ tags only in the given upper/lower case.
+
+ Example: similar to L{makeHTMLTags}
+ """
+ return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+ """
+ Helper to create a validating parse action to be used with start tags created
+ with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+ with a required attribute value, to avoid false matches on common tags such as
+ C{<TD>} or C{<DIV>}.
+
+ Call C{withAttribute} with a series of attribute names and values. Specify the list
+    of filter attribute names and values as:
+ - keyword arguments, as in C{(align="right")}, or
+ - as an explicit dict with C{**} operator, when an attribute name is also a Python
+ reserved word, as in C{**{"class":"Customer", "align":"right"}}
+ - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
+ For attribute names with a namespace prefix, you must use the second form. Attribute
+ names are matched insensitive to upper/lower case.
+
+ If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
+
+ To verify that the attribute exists, but without specifying a value, pass
+ C{withAttribute.ANY_VALUE} as the value.
+
+ Example::
+ html = '''
+ <div>
+ Some text
+ <div type="grid">1 4 0 1 0</div>
+ <div type="graph">1,3 2,3 1,1</div>
+ <div>this has no type</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+
+ # only match div tag having a type attribute with value "grid"
+ div_grid = div().setParseAction(withAttribute(type="grid"))
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ # construct a match with any div tag having a type attribute, regardless of the value
+ div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+ prints::
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ if args:
+ attrs = args[:]
+ else:
+ attrs = attrDict.items()
+ attrs = [(k,v) for k,v in attrs]
+ def pa(s,l,tokens):
+ for attrName,attrValue in attrs:
+ if attrName not in tokens:
+ raise ParseException(s,l,"no matching attribute " + attrName)
+ if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
+ raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
+ (attrName, tokens[attrName], attrValue))
+ return pa
+withAttribute.ANY_VALUE = object()
+
+def withClass(classname, namespace=''):
+ """
+ Simplified version of C{L{withAttribute}} when matching on a div class - made
+ difficult because C{class} is a reserved word in Python.
+
+ Example::
+ html = '''
+ <div>
+ Some text
+ <div class="grid">1 4 0 1 0</div>
+ <div class="graph">1,3 2,3 1,1</div>
+ <div>this &lt;div&gt; has no class</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+ div_grid = div().setParseAction(withClass("grid"))
+
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+ prints::
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ classattr = "%s:class" % namespace if namespace else "class"
+ return withAttribute(**{classattr : classname})
+
+opAssoc = _Constants()
+opAssoc.LEFT = object()
+opAssoc.RIGHT = object()
+
+def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
+ """
+ Helper method for constructing grammars of expressions made up of
+ operators working in a precedence hierarchy. Operators may be unary or
+ binary, left- or right-associative. Parse actions can also be attached
+ to operator expressions. The generated parser will also recognize the use
+ of parentheses to override operator precedences (see example below).
+
+ Note: if you define a deep operator list, you may see performance issues
+ when using infixNotation. See L{ParserElement.enablePackrat} for a
+ mechanism to potentially improve your parser performance.
+
+ Parameters:
+ - baseExpr - expression representing the most basic element of the nested expression grammar (the operand)
+ - opList - list of tuples, one for each operator precedence level in the
+ expression grammar; each tuple is of the form
+ (opExpr, numTerms, rightLeftAssoc, parseAction), where:
+ - opExpr is the pyparsing expression for the operator;
+ may also be a string, which will be converted to a Literal;
+ if numTerms is 3, opExpr is a tuple of two expressions, for the
+ two operators separating the 3 terms
+ - numTerms is the number of terms for this operator (must
+ be 1, 2, or 3)
+ - rightLeftAssoc indicates whether the operator is
+ right- or left-associative, using the pyparsing-defined
+ constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
+ - parseAction is the parse action to be associated with
+ expressions matching this operator expression (the
+ parse action tuple member may be omitted)
+ - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
+ - rpar - expression for matching right-parentheses (default=C{Suppress(')')})
+
+ Example::
+ # simple example of four-function arithmetic with ints and variable names
+ integer = pyparsing_common.signed_integer
+ varname = pyparsing_common.identifier
+
+ arith_expr = infixNotation(integer | varname,
+ [
+ ('-', 1, opAssoc.RIGHT),
+ (oneOf('* /'), 2, opAssoc.LEFT),
+ (oneOf('+ -'), 2, opAssoc.LEFT),
+ ])
+
+ arith_expr.runTests('''
+ 5+3*6
+ (5+3)*6
+ -2--11
+ ''', fullDump=False)
+ prints::
+ 5+3*6
+ [[5, '+', [3, '*', 6]]]
+
+ (5+3)*6
+ [[[5, '+', 3], '*', 6]]
+
+ -2--11
+ [[['-', 2], '-', ['-', 11]]]
+ """
+ ret = Forward()
+ lastExpr = baseExpr | ( lpar + ret + rpar )
+ for i,operDef in enumerate(opList):
+ opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
+ termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
+ if arity == 3:
+ if opExpr is None or len(opExpr) != 2:
+ raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
+ opExpr1, opExpr2 = opExpr
+ thisExpr = Forward().setName(termName)
+ if rightLeftAssoc == opAssoc.LEFT:
+ if arity == 1:
+ matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
+ else:
+ matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
+ elif arity == 3:
+ matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
+ Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ elif rightLeftAssoc == opAssoc.RIGHT:
+ if arity == 1:
+ # try to avoid LR with this extra test
+ if not isinstance(opExpr, Optional):
+ opExpr = Optional(opExpr)
+ matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
+ else:
+ matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
+ elif arity == 3:
+ matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
+ Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ else:
+ raise ValueError("operator must indicate right or left associativity")
+ if pa:
+ matchExpr.setParseAction( pa )
+ thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
+ lastExpr = thisExpr
+ ret <<= lastExpr
+ return ret
+
+operatorPrecedence = infixNotation
+"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""
+
+dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")
+sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|
+ Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")
+unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
+
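+# A minimal usage sketch (illustrative): the quoted-string expressions above keep
+# their enclosing quotes unless a parse action such as removeQuotes strips them:
+#   quotedString.parseString('"hello"')                                      # -> ['"hello"']
+#   quotedString.copy().setParseAction(removeQuotes).parseString('"hello"')  # -> ['hello']
+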
+def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
+ """
+ Helper method for defining nested lists enclosed in opening and closing
+ delimiters ("(" and ")" are the default).
+
+ Parameters:
+ - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
+ - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
+ - content - expression for items within the nested lists (default=C{None})
+ - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})
+
+ If an expression is not provided for the content argument, the nested
+ expression will capture all whitespace-delimited content between delimiters
+ as a list of separate values.
+
+ Use the C{ignoreExpr} argument to define expressions that may contain
+ opening or closing characters that should not be treated as opening
+ or closing characters for nesting, such as quotedString or a comment
+ expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
+ The default is L{quotedString}, but if no expressions are to be ignored,
+ then pass C{None} for this argument.
+
+ Example::
+ data_type = oneOf("void int short long char float double")
+ decl_data_type = Combine(data_type + Optional(Word('*')))
+ ident = Word(alphas+'_', alphanums+'_')
+ number = pyparsing_common.number
+ arg = Group(decl_data_type + ident)
+ LPAR,RPAR = map(Suppress, "()")
+
+ code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))
+
+ c_function = (decl_data_type("type")
+ + ident("name")
+ + LPAR + Optional(delimitedList(arg), [])("args") + RPAR
+ + code_body("body"))
+ c_function.ignore(cStyleComment)
+
+ source_code = '''
+ int is_odd(int x) {
+ return (x%2);
+ }
+
+ int dec_to_hex(char hchar) {
+ if (hchar >= '0' && hchar <= '9') {
+ return (ord(hchar)-ord('0'));
+ } else {
+ return (10+ord(hchar)-ord('A'));
+ }
+ }
+ '''
+ for func in c_function.searchString(source_code):
+ print("%(name)s (%(type)s) args: %(args)s" % func)
+
+ prints::
+ is_odd (int) args: [['int', 'x']]
+ dec_to_hex (int) args: [['char', 'hchar']]
+ """
+ if opener == closer:
+ raise ValueError("opening and closing strings cannot be the same")
+ if content is None:
+ if isinstance(opener,basestring) and isinstance(closer,basestring):
+ if len(opener) == 1 and len(closer)==1:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ ~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ raise ValueError("opening and closing arguments must be strings if no content expression is given")
+ ret = Forward()
+ if ignoreExpr is not None:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
+ else:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
+ ret.setName('nested %s%s expression' % (opener,closer))
+ return ret
+
+def indentedBlock(blockStatementExpr, indentStack, indent=True):
+ """
+ Helper method for defining space-delimited indentation blocks, such as
+ those used to define block statements in Python source code.
+
+ Parameters:
+ - blockStatementExpr - expression defining syntax of statement that
+ is repeated within the indented block
+ - indentStack - list created by caller to manage indentation stack
+ (multiple statementWithIndentedBlock expressions within a single grammar
+ should share a common indentStack)
+ - indent - boolean indicating whether block must be indented beyond
+ the current level; set to False for block of left-most statements
+ (default=C{True})
+
+ A valid block must contain at least one C{blockStatement}.
+
+ Example::
+ data = '''
+ def A(z):
+ A1
+ B = 100
+ G = A2
+ A2
+ A3
+ B
+ def BB(a,b,c):
+ BB1
+ def BBA():
+ bba1
+ bba2
+ bba3
+ C
+ D
+ def spam(x,y):
+ def eggs(z):
+ pass
+ '''
+
+
+ indentStack = [1]
+ stmt = Forward()
+
+ identifier = Word(alphas, alphanums)
+ funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
+ func_body = indentedBlock(stmt, indentStack)
+ funcDef = Group( funcDecl + func_body )
+
+ rvalue = Forward()
+ funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
+ rvalue << (funcCall | identifier | Word(nums))
+ assignment = Group(identifier + "=" + rvalue)
+ stmt << ( funcDef | assignment | identifier )
+
+ module_body = OneOrMore(stmt)
+
+ parseTree = module_body.parseString(data)
+ parseTree.pprint()
+ prints::
+ [['def',
+ 'A',
+ ['(', 'z', ')'],
+ ':',
+ [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
+ 'B',
+ ['def',
+ 'BB',
+ ['(', 'a', 'b', 'c', ')'],
+ ':',
+ [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
+ 'C',
+ 'D',
+ ['def',
+ 'spam',
+ ['(', 'x', 'y', ')'],
+ ':',
+ [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
+ """
+ def checkPeerIndent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if curCol != indentStack[-1]:
+ if curCol > indentStack[-1]:
+ raise ParseFatalException(s,l,"illegal nesting")
+ raise ParseException(s,l,"not a peer entry")
+
+ def checkSubIndent(s,l,t):
+ curCol = col(l,s)
+ if curCol > indentStack[-1]:
+ indentStack.append( curCol )
+ else:
+ raise ParseException(s,l,"not a subentry")
+
+ def checkUnindent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
+ raise ParseException(s,l,"not an unindent")
+ indentStack.pop()
+
+ NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
+ INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
+ PEER = Empty().setParseAction(checkPeerIndent).setName('')
+ UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
+ if indent:
+ smExpr = Group( Optional(NL) +
+ #~ FollowedBy(blockStatementExpr) +
+ INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
+ else:
+ smExpr = Group( Optional(NL) +
+ (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
+ blockStatementExpr.ignore(_bslash + LineEnd())
+ return smExpr.setName('indented block')
+
+alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
+punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
+
+anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
+_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
+commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
+def replaceHTMLEntity(t):
+ """Helper parser action to replace common HTML entities with their special characters"""
+ return _htmlEntityMap.get(t.entity)
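+
+# Usage sketch for replaceHTMLEntity (illustrative):
+#   unescaper = commonHTMLEntity.copy().setParseAction(replaceHTMLEntity)
+#   unescaper.transformString("x &lt; y &amp;&amp; y &gt; z")   # -> 'x < y && y > z'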
+
+# it's easy to get these comment structures wrong - they're very common, so may as well make them available
+cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
+"Comment of the form C{/* ... */}"
+
+htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
+"Comment of the form C{<!-- ... -->}"
+
+restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
+dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
+"Comment of the form C{// ... (to end of line)}"
+
+cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
+"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"
+
+javaStyleComment = cppStyleComment
+"Same as C{L{cppStyleComment}}"
+
+pythonStyleComment = Regex(r"#.*").setName("Python style comment")
+"Comment of the form C{# ... (to end of line)}"
+
+_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
+ Optional( Word(" \t") +
+ ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
+commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
+"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
+ This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""
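+
+# Usage sketch (illustrative): quoted items keep their quotes, and whitespace
+# around the commas is consumed:
+#   commaSeparatedList.parseString("a, 'b, c', d")   # -> ['a', "'b, c'", 'd']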
+
+# some other useful expressions - using lower-case class name since we are really using this as a namespace
+class pyparsing_common:
+ """
+ Here are some common low-level expressions that may be useful in jump-starting parser development:
+ - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
+ - common L{programming identifiers<identifier>}
+ - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
+ - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
+ - L{UUID<uuid>}
+ - L{comma-separated list<comma_separated_list>}
+ Parse actions:
+ - C{L{convertToInteger}}
+ - C{L{convertToFloat}}
+ - C{L{convertToDate}}
+ - C{L{convertToDatetime}}
+ - C{L{stripHTMLTags}}
+ - C{L{upcaseTokens}}
+ - C{L{downcaseTokens}}
+
+ Example::
+ pyparsing_common.number.runTests('''
+ # any int or real number, returned as the appropriate type
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.fnumber.runTests('''
+ # any int or real number, returned as float
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.hex_integer.runTests('''
+ # hex numbers
+ 100
+ FF
+ ''')
+
+ pyparsing_common.fraction.runTests('''
+ # fractions
+ 1/2
+ -3/4
+ ''')
+
+ pyparsing_common.mixed_integer.runTests('''
+ # mixed fractions
+ 1
+ 1/2
+ -3/4
+ 1-3/4
+ ''')
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests('''
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ ''')
+ prints::
+ # any int or real number, returned as the appropriate type
+ 100
+ [100]
+
+ -100
+ [-100]
+
+ +100
+ [100]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # any int or real number, returned as float
+ 100
+ [100.0]
+
+ -100
+ [-100.0]
+
+ +100
+ [100.0]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # hex numbers
+ 100
+ [256]
+
+ FF
+ [255]
+
+ # fractions
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ # mixed fractions
+ 1
+ [1]
+
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ 1-3/4
+ [1.75]
+
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ [UUID('12345678-1234-5678-1234-567812345678')]
+ """
+
+ convertToInteger = tokenMap(int)
+ """
+ Parse action for converting parsed integers to Python int
+ """
+
+ convertToFloat = tokenMap(float)
+ """
+ Parse action for converting parsed numbers to Python float
+ """
+
+ integer = Word(nums).setName("integer").setParseAction(convertToInteger)
+ """expression that parses an unsigned integer, returns an int"""
+
+ hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
+ """expression that parses a hexadecimal integer, returns an int"""
+
+ signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+ """expression that parses an integer with optional leading sign, returns an int"""
+
+ fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
+ """fractional expression of an integer divided by an integer, returns a float"""
+ fraction.addParseAction(lambda t: t[0]/t[-1])
+
+ mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+ """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
+ mixed_integer.addParseAction(sum)
+
+ real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
+ """expression that parses a floating point number and returns a float"""
+
+ sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+ """expression that parses a floating point number with optional scientific notation and returns a float"""
+
+ # streamlining this expression makes the docs nicer-looking
+ number = (sci_real | real | signed_integer).streamline()
+ """any numeric expression, returns the corresponding Python type"""
+
+ fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
+ """any int or real number, returned as float"""
+
+ identifier = Word(alphas+'_', alphanums+'_').setName("identifier")
+ """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
+
+ ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
+ "IPv4 address (C{0.0.0.0 - 255.255.255.255})"
+
+ _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
+ _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")
+ _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")
+ _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
+ _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
+ ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
+ "IPv6 address (long, short, or mixed form)"
+
+ mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
+ "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
+
+ @staticmethod
+ def convertToDate(fmt="%Y-%m-%d"):
+ """
+ Helper to create a parse action for converting parsed date string to Python datetime.date
+
+ Parameters:
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})
+
+ Example::
+ date_expr = pyparsing_common.iso8601_date.copy()
+ date_expr.setParseAction(pyparsing_common.convertToDate())
+ print(date_expr.parseString("1999-12-31"))
+ prints::
+ [datetime.date(1999, 12, 31)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt).date()
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ @staticmethod
+ def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
+ """
+ Helper to create a parse action for converting parsed datetime string to Python datetime.datetime
+
+ Parameters:
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})
+
+ Example::
+ dt_expr = pyparsing_common.iso8601_datetime.copy()
+ dt_expr.setParseAction(pyparsing_common.convertToDatetime())
+ print(dt_expr.parseString("1999-12-31T23:59:59.999"))
+ prints::
+ [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt)
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
+ "ISO8601 date (C{yyyy-mm-dd})"
+
+ iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
+ "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"
+
+ uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
+ "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"
+
+ _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
+ @staticmethod
+ def stripHTMLTags(s, l, tokens):
+ """
+ Parse action to remove HTML tags from web page HTML source
+
+ Example::
+ # strip HTML links from normal text
+ text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+ td,td_end = makeHTMLTags("TD")
+ table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
+
+ print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
+ """
+ return pyparsing_common._html_stripper.transformString(tokens[0])
+
+ _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
+ + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
+ comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
+ """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+ upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+ """Parse action to convert tokens to upper case."""
+
+ downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+ """Parse action to convert tokens to lower case."""
+
+
+if __name__ == "__main__":
+
+ selectToken = CaselessLiteral("select")
+ fromToken = CaselessLiteral("from")
+
+ ident = Word(alphas, alphanums + "_$")
+
+ columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ columnNameList = Group(delimitedList(columnName)).setName("columns")
+ columnSpec = ('*' | columnNameList)
+
+ tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ tableNameList = Group(delimitedList(tableName)).setName("tables")
+
+ simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
+
+ # demo runTests method, including embedded comments in test string
+ simpleSQL.runTests("""
+ # '*' as column list and dotted table name
+ select * from SYS.XYZZY
+
+ # caseless match on "SELECT", and casts back to "select"
+ SELECT * from XYZZY, ABC
+
+ # list of column names, and mixed case SELECT keyword
+ Select AA,BB,CC from Sys.dual
+
+ # multiple tables
+ Select A, B, C from Sys.dual, Table2
+
+ # invalid SELECT keyword - should fail
+ Xelect A, B, C from Sys.dual
+
+ # incomplete command - should fail
+ Select
+
+ # invalid column name - should fail
+ Select ^^^ frox Sys.dual
+
+ """)
+
+ pyparsing_common.number.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ # any int or real number, returned as float
+ pyparsing_common.fnumber.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ pyparsing_common.hex_integer.runTests("""
+ 100
+ FF
+ """)
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests("""
+ 12345678-1234-5678-1234-567812345678
+ """)
diff --git a/setuptools/_vendor/six.py b/setuptools/_vendor/six.py
new file mode 100644
index 0000000..190c023
--- /dev/null
+++ b/setuptools/_vendor/six.py
@@ -0,0 +1,868 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2015 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP 302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python 3.
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
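+
+# Usage sketch (illustrative): client code imports relocated stdlib names through
+# the lazy `moves` namespace instead of branching on the Python version:
+#   from six.moves import configparser, range
+#   from six.moves.urllib.parse import urlparse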
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
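+
+# Usage sketch (illustrative; "dumps" is a hypothetical move, not a real six one):
+#   add_move(MovedAttribute("dumps", "json", "json"))
+#   from six.moves import dumps
+#   remove_move("dumps")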
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
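+
+# Usage sketch (illustrative): these accessors hide the 2/3 attribute renames,
+# e.g. for a function f:
+#   get_function_code(f)   # f.__code__ on Python 3, f.func_code on Python 2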
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
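+
+# Usage sketch (illustrative): b() always yields the binary type, u() the text type:
+#   b("abc")   # bytes on Python 3, str on Python 2
+#   u("abc")   # str on Python 3, unicode on Python 2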
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
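+
+# Usage sketch (illustrative), called with a unittest.TestCase instance so the
+# correct per-version assertion method is dispatched:
+#   assertCountEqual(self, [1, 2, 2], [2, 2, 1])
+#   assertRegex(self, "setuptools", r"^setup")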
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
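+
+# Usage sketch (illustrative; parse/data are placeholders):
+#   try:
+#       parse(data)
+#   except KeyError as exc:
+#       raise_from(ValueError("bad data"), exc)   # chains the cause where supported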
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
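+
+# Usage sketch (illustrative): re-raise the exception currently being handled,
+# preserving its traceback:
+#   import sys
+#   reraise(*sys.exc_info())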
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
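+
+# Usage sketch (illustrative), assuming `import abc` in the calling module:
+#   class MyABC(with_metaclass(abc.ABCMeta, object)):
+#       pass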
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
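+
+# Usage sketch (illustrative), assuming `import abc` in the calling module:
+#   @add_metaclass(abc.ABCMeta)
+#   class MyABC(object):
+#       pass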
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
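+
+# Usage sketch (illustrative):
+#   @python_2_unicode_compatible
+#   class Greeting(object):
+#       def __str__(self):
+#           return u'hello'   # text on both majors; Python 2 also gets __unicode__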
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
new file mode 100644
index 0000000..be3e72e
--- /dev/null
+++ b/setuptools/_vendor/vendored.txt
@@ -0,0 +1,3 @@
+packaging==16.8
+pyparsing==2.1.10
+six==1.10.0
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
new file mode 100755
index 0000000..8143604
--- /dev/null
+++ b/setuptools/archive_util.py
@@ -0,0 +1,173 @@
+"""Utilities for extracting common archive formats"""
+
+import zipfile
+import tarfile
+import os
+import shutil
+import posixpath
+import contextlib
+from distutils.errors import DistutilsError
+
+from pkg_resources import ensure_directory
+
+__all__ = [
+ "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+ "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+
+class UnrecognizedFormat(DistutilsError):
+ """Couldn't recognize the archive type"""
+
+
+def default_filter(src, dst):
+ """The default progress/filter callback; returns True for all files"""
+ return dst
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+ drivers=None):
+ """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+ `progress_filter` is a function taking two arguments: a source path
+ internal to the archive ('/'-separated), and a filesystem path where it
+ will be extracted. The callback must return the desired extract path
+ (which may be the same as the one passed in), or else ``None`` to skip
+ that file or directory. The callback can thus be used to report on the
+ progress of the extraction, as well as to filter the items extracted or
+ alter their extraction paths.
+
+ `drivers`, if supplied, must be a non-empty sequence of functions with the
+ same signature as this function (minus the `drivers` argument), that raise
+ ``UnrecognizedFormat`` if they do not support extracting the designated
+ archive type. The `drivers` are tried in sequence until one is found that
+ does not raise an error, or until all are exhausted (in which case
+ ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
+ drivers, the module's ``extraction_drivers`` constant will be used, which
+ means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+ order.
+ """
+ for driver in drivers or extraction_drivers:
+ try:
+ driver(filename, extract_dir, progress_filter)
+ except UnrecognizedFormat:
+ continue
+ else:
+ return
+ else:
+ raise UnrecognizedFormat(
+ "Not a recognized archive type: %s" % filename
+ )
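+
+# Usage sketch (illustrative; paths are placeholders):
+#   unpack_archive('dist/pkg-1.0.tar.gz', 'build/pkg')   # format auto-detected
+#
+#   def skip_tests(src, dst):
+#       # returning None skips an entry; any other value is used as the extract path
+#       return None if src.startswith('tests/') else dst
+#
+#   unpack_archive('dist/pkg-1.0.zip', 'build/pkg', skip_tests)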
+
+
+def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+ """"Unpack" a directory, using the same interface as for archives
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a directory
+ """
+ if not os.path.isdir(filename):
+ raise UnrecognizedFormat("%s is not a directory" % filename)
+
+ paths = {
+ filename: ('', extract_dir),
+ }
+ for base, dirs, files in os.walk(filename):
+ src, dst = paths[base]
+ for d in dirs:
+ paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
+ for f in files:
+ target = os.path.join(dst, f)
+ target = progress_filter(src + f, target)
+ if not target:
+ # entry was filtered out by the callback; skip it
+ continue
+ ensure_directory(target)
+ f = os.path.join(base, f)
+ shutil.copyfile(f, target)
+ shutil.copystat(f, target)
+
+
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack zip `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
+ by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+
+ if not zipfile.is_zipfile(filename):
+ raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+
+ with zipfile.ZipFile(filename) as z:
+ for info in z.infolist():
+ name = info.filename
+
+ # don't extract absolute paths or ones with .. in them
+ if name.startswith('/') or '..' in name.split('/'):
+ continue
+
+ target = os.path.join(extract_dir, *name.split('/'))
+ target = progress_filter(name, target)
+ if not target:
+ continue
+ if name.endswith('/'):
+ # directory
+ ensure_directory(target)
+ else:
+ # file
+ ensure_directory(target)
+ data = z.read(info.filename)
+ with open(target, 'wb') as f:
+ f.write(data)
+ unix_attributes = info.external_attr >> 16
+ if unix_attributes:
+ os.chmod(target, unix_attributes)
+
+
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
+ by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+ try:
+ tarobj = tarfile.open(filename)
+ except tarfile.TarError:
+ raise UnrecognizedFormat(
+ "%s is not a compressed or uncompressed tar file" % (filename,)
+ )
+ with contextlib.closing(tarobj):
+ # don't do any chowning!
+ tarobj.chown = lambda *args: None
+ for member in tarobj:
+ name = member.name
+ # don't extract absolute paths or ones with .. in them
+ if not name.startswith('/') and '..' not in name.split('/'):
+ prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+ # resolve any links and to extract the link targets as normal
+ # files
+ while member is not None and (member.islnk() or member.issym()):
+ linkpath = member.linkname
+ if member.issym():
+ base = posixpath.dirname(member.name)
+ linkpath = posixpath.join(base, linkpath)
+ linkpath = posixpath.normpath(linkpath)
+ member = tarobj._getmember(linkpath)
+
+ if member is not None and (member.isfile() or member.isdir()):
+ final_dst = progress_filter(name, prelim_dst)
+ if final_dst:
+ if final_dst.endswith(os.sep):
+ final_dst = final_dst[:-1]
+ try:
+ # XXX Ugh
+ tarobj._extract_member(member, final_dst)
+ except tarfile.ExtractError:
+ # chown/chmod/mkfifo/mknode/makedev failed
+ pass
+ return True
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
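To make the `progress_filter` contract concrete, a minimal usage sketch (the callback and file names are illustrative, not part of the module):

    from setuptools.archive_util import unpack_archive

    def skip_tests_filter(src, dst):
        # src: '/'-separated path inside the archive; dst: target on disk
        if src.startswith('tests/'):
            return None           # returning None skips this entry
        print('extracting', src)
        return dst                # otherwise return the desired extract path

    unpack_archive('example-1.0.tar.gz', 'build/example',
                   progress_filter=skip_tests_filter)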
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
new file mode 100644
index 0000000..609ea1e
--- /dev/null
+++ b/setuptools/build_meta.py
@@ -0,0 +1,172 @@
+"""A PEP 517 interface to setuptools
+
+Previously, when a user or a command line tool (let's call it a "frontend")
+needed to make a request of setuptools to take a certain action, for
+example, generating a list of installation requirements, the frontend
+would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
+
+PEP 517 defines a different method of interfacing with setuptools. Rather
+than calling "setup.py" directly, the frontend should:
+
+ 1. Set the current directory to the directory with a setup.py file
+ 2. Import this module into a disposable python interpreter (one in which
+ it is acceptable for setuptools to set global variables or crash hard).
+ 3. Call one of the functions defined in PEP 517.
+
+What each function does is defined in PEP 517. However, here is a "casual"
+definition of the functions (this definition should not be relied on for
+bug reports or API stability):
+
+ - `build_wheel`: build a wheel in the folder and return the basename
+ - `get_requires_for_build_wheel`: get the `setup_requires` to build
+ - `prepare_metadata_for_build_wheel`: get the `install_requires`
+ - `build_sdist`: build an sdist in the folder and return the basename
+ - `get_requires_for_build_sdist`: get the `setup_requires` to build
+
+Again, this is not a formal definition! Just a "taste" of the module.
+"""
+
+import os
+import sys
+import tokenize
+import shutil
+import contextlib
+
+import setuptools
+import distutils
+
+
+class SetupRequirementsError(BaseException):
+ def __init__(self, specifiers):
+ self.specifiers = specifiers
+
+
+class Distribution(setuptools.dist.Distribution):
+ def fetch_build_eggs(self, specifiers):
+ raise SetupRequirementsError(specifiers)
+
+ @classmethod
+ @contextlib.contextmanager
+ def patch(cls):
+ """
+ Replace
+ distutils.dist.Distribution with this class
+ for the duration of this context.
+ """
+ orig = distutils.core.Distribution
+ distutils.core.Distribution = cls
+ try:
+ yield
+ finally:
+ distutils.core.Distribution = orig
+
+
+def _run_setup(setup_script='setup.py'):
+ # Note that we can reuse our build directory between calls
+ # Correctness comes first, then optimization later
+ __file__ = setup_script
+ __name__ = '__main__'
+ f = getattr(tokenize, 'open', open)(__file__)
+ code = f.read().replace('\\r\\n', '\\n')
+ f.close()
+ exec(compile(code, __file__, 'exec'), locals())
+
+
+def _fix_config(config_settings):
+ config_settings = config_settings or {}
+ config_settings.setdefault('--global-option', [])
+ return config_settings
+
+
+def _get_build_requires(config_settings):
+ config_settings = _fix_config(config_settings)
+ requirements = ['setuptools', 'wheel']
+
+ sys.argv = sys.argv[:1] + ['egg_info'] + \
+ config_settings["--global-option"]
+ try:
+ with Distribution.patch():
+ _run_setup()
+ except SetupRequirementsError as e:
+ requirements += e.specifiers
+
+ return requirements
+
+
+def _get_immediate_subdirectories(a_dir):
+ return [name for name in os.listdir(a_dir)
+ if os.path.isdir(os.path.join(a_dir, name))]
+
+
+def get_requires_for_build_wheel(config_settings=None):
+ config_settings = _fix_config(config_settings)
+ return _get_build_requires(config_settings)
+
+
+def get_requires_for_build_sdist(config_settings=None):
+ config_settings = _fix_config(config_settings)
+ return _get_build_requires(config_settings)
+
+
+def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
+ sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', metadata_directory]
+ _run_setup()
+
+ dist_info_directory = metadata_directory
+ while True:
+ dist_infos = [f for f in os.listdir(dist_info_directory)
+ if f.endswith('.dist-info')]
+
+ if len(dist_infos) == 0 and \
+ len(_get_immediate_subdirectories(dist_info_directory)) == 1:
+ dist_info_directory = os.path.join(
+ dist_info_directory, os.listdir(dist_info_directory)[0])
+ continue
+
+ assert len(dist_infos) == 1
+ break
+
+ # PEP 517 requires that the .dist-info directory be placed in the
+ # metadata_directory. To comply, we MUST copy the directory to the root
+ if dist_info_directory != metadata_directory:
+ shutil.move(
+ os.path.join(dist_info_directory, dist_infos[0]),
+ metadata_directory)
+ shutil.rmtree(dist_info_directory, ignore_errors=True)
+
+ return dist_infos[0]
+
+
+def build_wheel(wheel_directory, config_settings=None,
+ metadata_directory=None):
+ config_settings = _fix_config(config_settings)
+ wheel_directory = os.path.abspath(wheel_directory)
+ sys.argv = sys.argv[:1] + ['bdist_wheel'] + \
+ config_settings["--global-option"]
+ _run_setup()
+ if wheel_directory != 'dist':
+ shutil.rmtree(wheel_directory)
+ shutil.copytree('dist', wheel_directory)
+
+ wheels = [f for f in os.listdir(wheel_directory)
+ if f.endswith('.whl')]
+
+ assert len(wheels) == 1
+ return wheels[0]
+
+
+def build_sdist(sdist_directory, config_settings=None):
+ config_settings = _fix_config(config_settings)
+ sdist_directory = os.path.abspath(sdist_directory)
+ sys.argv = sys.argv[:1] + ['sdist'] + \
+ config_settings["--global-option"]
+ _run_setup()
+ if sdist_directory != 'dist':
+ shutil.rmtree(sdist_directory)
+ shutil.copytree('dist', sdist_directory)
+
+ sdists = [f for f in os.listdir(sdist_directory)
+ if f.endswith('.tar.gz')]
+
+ assert len(sdists) == 1
+ return sdists[0]
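A rough sketch of how a PEP 517 frontend drives these hooks (paths are illustrative; a real frontend would run this in a fresh interpreter with the project directory as the working directory, per the module docstring):

    import os
    from setuptools import build_meta

    os.chdir('/path/to/project')   # the directory containing setup.py
    print(build_meta.get_requires_for_build_wheel())
    os.makedirs('wheelhouse')      # this version expects the target dir to exist
    basename = build_meta.build_wheel('wheelhouse')  # e.g. 'proj-1.0-py3-none-any.whl'
    print(os.path.join('wheelhouse', basename))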
diff --git a/setuptools/cli-32.exe b/setuptools/cli-32.exe
new file mode 100644
index 0000000..b1487b7
--- /dev/null
+++ b/setuptools/cli-32.exe
Binary files differ
diff --git a/setuptools/cli-64.exe b/setuptools/cli-64.exe
new file mode 100644
index 0000000..675e6bf
--- /dev/null
+++ b/setuptools/cli-64.exe
Binary files differ
diff --git a/setuptools/cli.exe b/setuptools/cli.exe
new file mode 100644
index 0000000..b1487b7
--- /dev/null
+++ b/setuptools/cli.exe
Binary files differ
diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py
new file mode 100644
index 0000000..fe619e2
--- /dev/null
+++ b/setuptools/command/__init__.py
@@ -0,0 +1,18 @@
+__all__ = [
+ 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+ 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+ 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
+ 'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
+ 'dist_info',
+]
+
+from distutils.command.bdist import bdist
+import sys
+
+from setuptools.command import install_scripts
+
+if 'egg' not in bdist.format_commands:
+ bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+ bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
new file mode 100755
index 0000000..4532b1c
--- /dev/null
+++ b/setuptools/command/alias.py
@@ -0,0 +1,80 @@
+from distutils.errors import DistutilsOptionError
+
+from setuptools.extern.six.moves import map
+
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+
+def shquote(arg):
+ """Quote an argument for later parsing by shlex.split()"""
+ for c in '"', "'", "\\", "#":
+ if c in arg:
+ return repr(arg)
+ if arg.split() != [arg]:
+ return repr(arg)
+ return arg
+
+
+class alias(option_base):
+ """Define a shortcut that invokes one or more commands"""
+
+ description = "define a shortcut to invoke one or more commands"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('remove', 'r', 'remove (unset) the alias'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.args = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.remove and len(self.args) != 1:
+ raise DistutilsOptionError(
+ "Must specify exactly one argument (the alias name) when "
+ "using --remove"
+ )
+
+ def run(self):
+ aliases = self.distribution.get_option_dict('aliases')
+
+ if not self.args:
+ print("Command Aliases")
+ print("---------------")
+ for alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+
+ elif len(self.args) == 1:
+ alias, = self.args
+ if self.remove:
+ command = None
+ elif alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+ else:
+ print("No alias definition found for %r" % alias)
+ return
+ else:
+ alias = self.args[0]
+ command = ' '.join(map(shquote, self.args[1:]))
+
+ edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+ source, command = aliases[name]
+ if source == config_file('global'):
+ source = '--global-config '
+ elif source == config_file('user'):
+ source = '--user-config '
+ elif source == config_file('local'):
+ source = ''
+ else:
+ source = '--filename=%r' % source
+ return source + name + ' ' + command
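A short sketch of the command in use, plus the quoting helper it relies on (the alias name 'daily' is illustrative):

    # Typical command-line use from a project checkout:
    #   python setup.py alias daily "egg_info sdist"   (define the alias)
    #   python setup.py daily                          (expands to: egg_info sdist)
    #   python setup.py alias --remove daily           (unset it)
    from setuptools.command.alias import shquote

    assert shquote('plain') == 'plain'
    assert shquote('two words') == "'two words'"  # repr-quoted for shlex.split()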
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
new file mode 100644
index 0000000..423b818
--- /dev/null
+++ b/setuptools/command/bdist_egg.py
@@ -0,0 +1,502 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+from distutils.errors import DistutilsSetupError
+from distutils.dir_util import remove_tree, mkpath
+from distutils import log
+from types import CodeType
+import sys
+import os
+import re
+import textwrap
+import marshal
+
+from setuptools.extern import six
+
+from pkg_resources import get_build_platform, Distribution, ensure_directory
+from pkg_resources import EntryPoint
+from setuptools.extension import Library
+from setuptools import Command
+
+try:
+ # Python 2.7 or >=3.2
+ from sysconfig import get_path, get_python_version
+
+ def _get_purelib():
+ return get_path("purelib")
+except ImportError:
+ from distutils.sysconfig import get_python_lib, get_python_version
+
+ def _get_purelib():
+ return get_python_lib(False)
+
+
+def strip_module(filename):
+ if '.' in filename:
+ filename = os.path.splitext(filename)[0]
+ if filename.endswith('module'):
+ filename = filename[:-6]
+ return filename
+
+
+def sorted_walk(dir):
+ """Do os.walk in a reproducible way,
+ independent of nondeterministic filesystem readdir order
+ """
+ for base, dirs, files in os.walk(dir):
+ dirs.sort()
+ files.sort()
+ yield base, dirs, files
+
+
+def write_stub(resource, pyfile):
+ _stub_template = textwrap.dedent("""
+ def __bootstrap__():
+ global __bootstrap__, __loader__, __file__
+ import sys, pkg_resources, imp
+ __file__ = pkg_resources.resource_filename(__name__, %r)
+ __loader__ = None; del __bootstrap__, __loader__
+ imp.load_dynamic(__name__,__file__)
+ __bootstrap__()
+ """).lstrip()
+ with open(pyfile, 'w') as f:
+ f.write(_stub_template % resource)
+
+
+class bdist_egg(Command):
+ description = "create an \"egg\" distribution"
+
+ user_options = [
+ ('bdist-dir=', 'b',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p', "platform name to embed in generated filenames "
+ "(default: %s)" % get_build_platform()),
+ ('exclude-source-files', None,
+ "remove all .py files from the generated egg"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ]
+
+ boolean_options = [
+ 'keep-temp', 'skip-build', 'exclude-source-files'
+ ]
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.dist_dir = None
+ self.skip_build = 0
+ self.egg_output = None
+ self.exclude_source_files = None
+
+ def finalize_options(self):
+ ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_info = ei_cmd.egg_info
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+ if self.plat_name is None:
+ self.plat_name = get_build_platform()
+
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ if self.egg_output is None:
+
+ # Compute filename of the output egg
+ basename = Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+ get_python_version(),
+ self.distribution.has_ext_modules() and self.plat_name
+ ).egg_name()
+
+ self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
+
+ def do_install_data(self):
+ # Hack for packages that install data to install's --install-lib
+ self.get_finalized_command('install').install_lib = self.bdist_dir
+
+ site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
+ old, self.distribution.data_files = self.distribution.data_files, []
+
+ for item in old:
+ if isinstance(item, tuple) and len(item) == 2:
+ if os.path.isabs(item[0]):
+ realpath = os.path.realpath(item[0])
+ normalized = os.path.normcase(realpath)
+ if normalized == site_packages or normalized.startswith(
+ site_packages + os.sep
+ ):
+ item = realpath[len(site_packages) + 1:], item[1]
+ # XXX else: raise ???
+ self.distribution.data_files.append(item)
+
+ try:
+ log.info("installing package data to %s", self.bdist_dir)
+ self.call_command('install_data', force=0, root=None)
+ finally:
+ self.distribution.data_files = old
+
+ def get_outputs(self):
+ return [self.egg_output]
+
+ def call_command(self, cmdname, **kw):
+ """Invoke reinitialized command `cmdname` with keyword args"""
+ for dirname in INSTALL_DIRECTORY_ATTRS:
+ kw.setdefault(dirname, self.bdist_dir)
+ kw.setdefault('skip_build', self.skip_build)
+ kw.setdefault('dry_run', self.dry_run)
+ cmd = self.reinitialize_command(cmdname, **kw)
+ self.run_command(cmdname)
+ return cmd
+
+ def run(self):
+ # Generate metadata first
+ self.run_command("egg_info")
+ # We run install_lib before install_data, because some data hacks
+ # pull their data path from the install_lib command.
+ log.info("installing library code to %s", self.bdist_dir)
+ instcmd = self.get_finalized_command('install')
+ old_root = instcmd.root
+ instcmd.root = None
+ if self.distribution.has_c_libraries() and not self.skip_build:
+ self.run_command('build_clib')
+ cmd = self.call_command('install_lib', warn_dir=0)
+ instcmd.root = old_root
+
+ all_outputs, ext_outputs = self.get_ext_outputs()
+ self.stubs = []
+ to_compile = []
+ for (p, ext_name) in enumerate(ext_outputs):
+ filename, ext = os.path.splitext(ext_name)
+ pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
+ '.py')
+ self.stubs.append(pyfile)
+ log.info("creating stub loader for %s", ext_name)
+ if not self.dry_run:
+ write_stub(os.path.basename(ext_name), pyfile)
+ to_compile.append(pyfile)
+ ext_outputs[p] = ext_name.replace(os.sep, '/')
+
+ if to_compile:
+ cmd.byte_compile(to_compile)
+ if self.distribution.data_files:
+ self.do_install_data()
+
+ # Make the EGG-INFO directory
+ archive_root = self.bdist_dir
+ egg_info = os.path.join(archive_root, 'EGG-INFO')
+ self.mkpath(egg_info)
+ if self.distribution.scripts:
+ script_dir = os.path.join(egg_info, 'scripts')
+ log.info("installing scripts to %s", script_dir)
+ self.call_command('install_scripts', install_dir=script_dir,
+ no_ep=1)
+
+ self.copy_metadata_to(egg_info)
+ native_libs = os.path.join(egg_info, "native_libs.txt")
+ if all_outputs:
+ log.info("writing %s", native_libs)
+ if not self.dry_run:
+ ensure_directory(native_libs)
+ libs_file = open(native_libs, 'wt')
+ libs_file.write('\n'.join(all_outputs))
+ libs_file.write('\n')
+ libs_file.close()
+ elif os.path.isfile(native_libs):
+ log.info("removing %s", native_libs)
+ if not self.dry_run:
+ os.unlink(native_libs)
+
+ write_safety_flag(
+ os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
+ )
+
+ if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
+ log.warn(
+ "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+ if self.exclude_source_files:
+ self.zap_pyfiles()
+
+ # Make the archive
+ make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+ dry_run=self.dry_run, mode=self.gen_header())
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ # Add to 'Distribution.dist_files' so that the "upload" command works
+ getattr(self.distribution, 'dist_files', []).append(
+ ('bdist_egg', get_python_version(), self.egg_output))
+
+ def zap_pyfiles(self):
+ log.info("Removing .py files from temporary directory")
+ for base, dirs, files in walk_egg(self.bdist_dir):
+ for name in files:
+ path = os.path.join(base, name)
+
+ if name.endswith('.py'):
+ log.debug("Deleting %s", path)
+ os.unlink(path)
+
+ if base.endswith('__pycache__'):
+ path_old = path
+
+ pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
+ m = re.match(pattern, name)
+ path_new = os.path.join(
+ base, os.pardir, m.group('name') + '.pyc')
+ log.info(
+ "Renaming file from [%s] to [%s]"
+ % (path_old, path_new))
+ try:
+ os.remove(path_new)
+ except OSError:
+ pass
+ os.rename(path_old, path_new)
+
+ def zip_safe(self):
+ safe = getattr(self.distribution, 'zip_safe', None)
+ if safe is not None:
+ return safe
+ log.warn("zip_safe flag not set; analyzing archive contents...")
+ return analyze_egg(self.bdist_dir, self.stubs)
+
+ def gen_header(self):
+ epm = EntryPoint.parse_map(self.distribution.entry_points or '')
+ ep = epm.get('setuptools.installation', {}).get('eggsecutable')
+ if ep is None:
+ return 'w' # not an eggsecutable, do it the usual way.
+
+ if not ep.attrs or ep.extras:
+ raise DistutilsSetupError(
+ "eggsecutable entry point (%r) cannot have 'extras' "
+ "or refer to a module" % (ep,)
+ )
+
+ pyver = sys.version[:3]
+ pkg = ep.module_name
+ full = '.'.join(ep.attrs)
+ base = ep.attrs[0]
+ basename = os.path.basename(self.egg_output)
+
+ header = (
+ "#!/bin/sh\n"
+ 'if [ `basename $0` = "%(basename)s" ]\n'
+ 'then exec python%(pyver)s -c "'
+ "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
+ "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
+ '" "$@"\n'
+ 'else\n'
+ ' echo $0 is not the correct name for this egg file.\n'
+ ' echo Please rename it back to %(basename)s and try again.\n'
+ ' exec false\n'
+ 'fi\n'
+ ) % locals()
+
+ if not self.dry_run:
+ mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
+ f = open(self.egg_output, 'w')
+ f.write(header)
+ f.close()
+ return 'a'
+
+ def copy_metadata_to(self, target_dir):
+ "Copy metadata (egg info) to the target_dir"
+ # normalize the path (so that a forward-slash in egg_info will
+ # match using startswith below)
+ norm_egg_info = os.path.normpath(self.egg_info)
+ prefix = os.path.join(norm_egg_info, '')
+ for path in self.ei_cmd.filelist.files:
+ if path.startswith(prefix):
+ target = os.path.join(target_dir, path[len(prefix):])
+ ensure_directory(target)
+ self.copy_file(path, target)
+
+ def get_ext_outputs(self):
+ """Get a list of relative paths to C extensions in the output distro"""
+
+ all_outputs = []
+ ext_outputs = []
+
+ paths = {self.bdist_dir: ''}
+ for base, dirs, files in sorted_walk(self.bdist_dir):
+ for filename in files:
+ if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+ all_outputs.append(paths[base] + filename)
+ for filename in dirs:
+ paths[os.path.join(base, filename)] = (paths[base] +
+ filename + '/')
+
+ if self.distribution.has_ext_modules():
+ build_cmd = self.get_finalized_command('build_ext')
+ for ext in build_cmd.extensions:
+ if isinstance(ext, Library):
+ continue
+ fullname = build_cmd.get_ext_fullname(ext.name)
+ filename = build_cmd.get_ext_filename(fullname)
+ if not os.path.basename(filename).startswith('dl-'):
+ if os.path.exists(os.path.join(self.bdist_dir, filename)):
+ ext_outputs.append(filename)
+
+ return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+ """Walk an unpacked egg's contents, skipping the metadata directory"""
+ walker = sorted_walk(egg_dir)
+ base, dirs, files = next(walker)
+ if 'EGG-INFO' in dirs:
+ dirs.remove('EGG-INFO')
+ yield base, dirs, files
+ for bdf in walker:
+ yield bdf
+
+
+def analyze_egg(egg_dir, stubs):
+ # check for existing flag in EGG-INFO
+ for flag, fn in safety_flags.items():
+ if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+ return flag
+ if not can_scan():
+ return False
+ safe = True
+ for base, dirs, files in walk_egg(egg_dir):
+ for name in files:
+ if name.endswith('.py') or name.endswith('.pyw'):
+ continue
+ elif name.endswith('.pyc') or name.endswith('.pyo'):
+ # always scan, even if we already know we're not safe
+ safe = scan_module(egg_dir, base, name, stubs) and safe
+ return safe
+
+
+def write_safety_flag(egg_dir, safe):
+ # Write or remove zip safety flag file(s)
+ for flag, fn in safety_flags.items():
+ fn = os.path.join(egg_dir, fn)
+ if os.path.exists(fn):
+ if safe is None or bool(safe) != flag:
+ os.unlink(fn)
+ elif safe is not None and bool(safe) == flag:
+ f = open(fn, 'wt')
+ f.write('\n')
+ f.close()
+
+
+safety_flags = {
+ True: 'zip-safe',
+ False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+ """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+ filename = os.path.join(base, name)
+ if filename[:-1] in stubs:
+ return True # Extension module
+ pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+ module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+ if sys.version_info < (3, 3):
+ skip = 8 # skip magic & date
+ elif sys.version_info < (3, 7):
+ skip = 12 # skip magic & date & file size
+ else:
+ skip = 16 # skip magic & flags (PEP 552) & date & file size
+ f = open(filename, 'rb')
+ f.read(skip)
+ code = marshal.load(f)
+ f.close()
+ safe = True
+ symbols = dict.fromkeys(iter_symbols(code))
+ for bad in ['__file__', '__path__']:
+ if bad in symbols:
+ log.warn("%s: module references %s", module, bad)
+ safe = False
+ if 'inspect' in symbols:
+ for bad in [
+ 'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+ 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+ 'getinnerframes', 'getouterframes', 'stack', 'trace'
+ ]:
+ if bad in symbols:
+ log.warn("%s: module MAY be using inspect.%s", module, bad)
+ safe = False
+ return safe
+
+
+def iter_symbols(code):
+ """Yield names and strings used by `code` and its nested code objects"""
+ for name in code.co_names:
+ yield name
+ for const in code.co_consts:
+ if isinstance(const, six.string_types):
+ yield const
+ elif isinstance(const, CodeType):
+ for name in iter_symbols(const):
+ yield name
+
+
+def can_scan():
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ # CPython, PyPy, etc.
+ return True
+ log.warn("Unable to analyze compiled code on this platform.")
+ log.warn("Please ask the author to include a 'zip_safe'"
+ " setting (either True or False) in the package's setup.py")
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+ 'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
+ mode='w'):
+ """Create a zip file from all the files under 'base_dir'. The output
+ zip file is written to 'zip_filename'. Uses the "zipfile" Python module,
+ opening the archive with 'mode' and compressing entries unless 'compress'
+ is false. Returns the name of the output zip file.
+ """
+ import zipfile
+
+ mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+ log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+ def visit(z, dirname, names):
+ for name in names:
+ path = os.path.normpath(os.path.join(dirname, name))
+ if os.path.isfile(path):
+ p = path[len(base_dir) + 1:]
+ if not dry_run:
+ z.write(path, p)
+ log.debug("adding '%s'", p)
+
+ compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ if not dry_run:
+ z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(z, dirname, files)
+ z.close()
+ else:
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(None, dirname, files)
+ return zip_filename
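The version-dependent header skip in `scan_module()` is easier to see in isolation; a sketch of the same .pyc layout arithmetic (magic, the PEP 552 flags word on 3.7+, timestamp, and a source-size field from 3.3 on):

    import marshal
    import sys

    def load_pyc_code(path):
        # bytes preceding the marshalled code object, per Python version
        if sys.version_info < (3, 3):
            skip = 8    # magic + timestamp
        elif sys.version_info < (3, 7):
            skip = 12   # magic + timestamp + source size
        else:
            skip = 16   # magic + flags + timestamp + source size
        with open(path, 'rb') as f:
            f.read(skip)
            return marshal.load(f)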
diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py
new file mode 100755
index 0000000..7073092
--- /dev/null
+++ b/setuptools/command/bdist_rpm.py
@@ -0,0 +1,43 @@
+import distutils.command.bdist_rpm as orig
+
+
+class bdist_rpm(orig.bdist_rpm):
+ """
+ Override the default bdist_rpm behavior to do the following:
+
+ 1. Run egg_info to ensure the name and version are properly calculated.
+ 2. Always run 'install' using --single-version-externally-managed to
+ disable eggs in RPM distributions.
+ 3. Replace dash with underscore in the version numbers for better RPM
+ compatibility.
+ """
+
+ def run(self):
+ # ensure distro name is up-to-date
+ self.run_command('egg_info')
+
+ orig.bdist_rpm.run(self)
+
+ def _make_spec_file(self):
+ version = self.distribution.get_version()
+ rpmversion = version.replace('-', '_')
+ spec = orig.bdist_rpm._make_spec_file(self)
+ line23 = '%define version ' + version
+ line24 = '%define version ' + rpmversion
+ spec = [
+ line.replace(
+ "Source0: %{name}-%{version}.tar",
+ "Source0: %{name}-%{unmangled_version}.tar"
+ ).replace(
+ "setup.py install ",
+ "setup.py install --single-version-externally-managed "
+ ).replace(
+ "%setup",
+ "%setup -n %{name}-%{unmangled_version}"
+ ).replace(line23, line24)
+ for line in spec
+ ]
+ insert_loc = spec.index(line24) + 1
+ unmangled_version = "%define unmangled_version " + version
+ spec.insert(insert_loc, unmangled_version)
+ return spec
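The dash-to-underscore rewrite matters because RPM reserves '-' as the name-version-release separator; a one-line sketch of the transformation:

    version = '1.0-beta-2'                  # illustrative setup() version
    rpmversion = version.replace('-', '_')  # '1.0_beta_2', valid in a Version: tag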
diff --git a/setuptools/command/bdist_wininst.py b/setuptools/command/bdist_wininst.py
new file mode 100755
index 0000000..073de97
--- /dev/null
+++ b/setuptools/command/bdist_wininst.py
@@ -0,0 +1,21 @@
+import distutils.command.bdist_wininst as orig
+
+
+class bdist_wininst(orig.bdist_wininst):
+ def reinitialize_command(self, command, reinit_subcommands=0):
+ """
+ Supplement reinitialize_command to work around
+ http://bugs.python.org/issue20819
+ """
+ cmd = self.distribution.reinitialize_command(
+ command, reinit_subcommands)
+ if command in ('install', 'install_lib'):
+ cmd.install_lib = None
+ return cmd
+
+ def run(self):
+ self._is_running = True
+ try:
+ orig.bdist_wininst.run(self)
+ finally:
+ self._is_running = False
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
new file mode 100644
index 0000000..09caff6
--- /dev/null
+++ b/setuptools/command/build_clib.py
@@ -0,0 +1,98 @@
+import distutils.command.build_clib as orig
+from distutils.errors import DistutilsSetupError
+from distutils import log
+from setuptools.dep_util import newer_pairwise_group
+
+
+class build_clib(orig.build_clib):
+ """
+ Override the default build_clib behaviour to do the following:
+
+ 1. Implement a rudimentary timestamp-based dependency system
+ so 'compile()' doesn't run every time.
+ 2. Add more keys to the 'build_info' dictionary:
+ * obj_deps - specify dependencies for each object compiled.
+ This should be a dictionary mapping each source
+ filename to a list of its dependencies. Use an
+ empty string as the key for global dependencies.
+ * cflags - specify a list of additional flags to pass to
+ the compiler.
+ """
+
+ def build_libraries(self, libraries):
+ for (lib_name, build_info) in libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
+ sources = list(sources)
+
+ log.info("building '%s' library", lib_name)
+
+ # Make sure everything is the correct type.
+ # obj_deps should be a dictionary mapping each source
+ # to a list/tuple of the files it depends on.
+ obj_deps = build_info.get('obj_deps', dict())
+ if not isinstance(obj_deps, dict):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ dependencies = []
+
+ # Get the global dependencies that are specified by the '' key.
+ # These will go into every source's dependency list.
+ global_deps = obj_deps.get('', list())
+ if not isinstance(global_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+
+ # Build the list to be used by newer_pairwise_group
+ # each source will be auto-added to its dependencies.
+ for source in sources:
+ src_deps = [source]
+ src_deps.extend(global_deps)
+ extra_deps = obj_deps.get(source, list())
+ if not isinstance(extra_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ src_deps.extend(extra_deps)
+ dependencies.append(src_deps)
+
+ expected_objects = self.compiler.object_filenames(
+ sources,
+ output_dir=self.build_temp
+ )
+
+ if newer_pairwise_group(dependencies, expected_objects) != ([], []):
+ # First, compile the source code to object files in the library
+ # directory. (This should probably change to putting object
+ # files in a temporary build directory.)
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ cflags = build_info.get('cflags')
+ objects = self.compiler.compile(
+ sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ extra_postargs=cflags,
+ debug=self.debug
+ )
+
+ # Now "link" the object files together into a static library.
+ # (On Unix at least, this isn't really linking -- it just
+ # builds an archive. Whatever.)
+ self.compiler.create_static_lib(
+ expected_objects,
+ lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug
+ )
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
new file mode 100644
index 0000000..ea97b37
--- /dev/null
+++ b/setuptools/command/build_ext.py
@@ -0,0 +1,331 @@
+import os
+import sys
+import itertools
+import imp
+from distutils.command.build_ext import build_ext as _du_build_ext
+from distutils.file_util import copy_file
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+from distutils.errors import DistutilsError
+from distutils import log
+
+from setuptools.extension import Library
+from setuptools.extern import six
+
+try:
+ # Attempt to use Cython for building extensions, if available
+ from Cython.Distutils.build_ext import build_ext as _build_ext
+ # Additionally, assert that the compiler module will load
+ # also. Ref #1229.
+ __import__('Cython.Compiler.Main')
+except ImportError:
+ _build_ext = _du_build_ext
+
+# make sure _config_vars is initialized
+get_config_var("LDSHARED")
+from distutils.sysconfig import _config_vars as _CONFIG_VARS
+
+
+def _customize_compiler_for_shlib(compiler):
+ if sys.platform == "darwin":
+ # building .dylib requires additional compiler flags on OSX; here we
+ # temporarily substitute the pyconfig.h variables so that distutils'
+ # 'customize_compiler' uses them before we build the shared libraries.
+ tmp = _CONFIG_VARS.copy()
+ try:
+ # XXX Help! I don't have any idea whether these are right...
+ _CONFIG_VARS['LDSHARED'] = (
+ "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
+ _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
+ _CONFIG_VARS['SO'] = ".dylib"
+ customize_compiler(compiler)
+ finally:
+ _CONFIG_VARS.clear()
+ _CONFIG_VARS.update(tmp)
+ else:
+ customize_compiler(compiler)
+
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+ use_stubs = True
+elif os.name != 'nt':
+ try:
+ import dl
+ use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
+ except ImportError:
+ pass
+
+if_dl = lambda s: s if have_rtld else ''
+
+
+def get_abi3_suffix():
+ """Return the file extension for an abi3-compliant Extension()"""
+ for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION):
+ if '.abi3' in suffix: # Unix
+ return suffix
+ elif suffix == '.pyd': # Windows
+ return suffix
+
+
+class build_ext(_build_ext):
+ def run(self):
+ """Build extensions in build directory, then copy if --inplace"""
+ old_inplace, self.inplace = self.inplace, 0
+ _build_ext.run(self)
+ self.inplace = old_inplace
+ if old_inplace:
+ self.copy_extensions_to_source()
+
+ def copy_extensions_to_source(self):
+ build_py = self.get_finalized_command('build_py')
+ for ext in self.extensions:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = self.get_ext_filename(fullname)
+ modpath = fullname.split('.')
+ package = '.'.join(modpath[:-1])
+ package_dir = build_py.get_package_dir(package)
+ dest_filename = os.path.join(package_dir,
+ os.path.basename(filename))
+ src_filename = os.path.join(self.build_lib, filename)
+
+ # Always copy, even if source is older than destination, to ensure
+ # that the right extensions for the current Python/platform are
+ # used.
+ copy_file(
+ src_filename, dest_filename, verbose=self.verbose,
+ dry_run=self.dry_run
+ )
+ if ext._needs_stub:
+ self.write_stub(package_dir or os.curdir, ext, True)
+
+ def get_ext_filename(self, fullname):
+ filename = _build_ext.get_ext_filename(self, fullname)
+ if fullname in self.ext_map:
+ ext = self.ext_map[fullname]
+ use_abi3 = (
+ six.PY3
+ and getattr(ext, 'py_limited_api')
+ and get_abi3_suffix()
+ )
+ if use_abi3:
+ so_ext = _get_config_var_837('EXT_SUFFIX')
+ filename = filename[:-len(so_ext)]
+ filename = filename + get_abi3_suffix()
+ if isinstance(ext, Library):
+ fn, ext = os.path.splitext(filename)
+ return self.shlib_compiler.library_filename(fn, libtype)
+ elif use_stubs and ext._links_to_dynamic:
+ d, fn = os.path.split(filename)
+ return os.path.join(d, 'dl-' + fn)
+ return filename
+
+ def initialize_options(self):
+ _build_ext.initialize_options(self)
+ self.shlib_compiler = None
+ self.shlibs = []
+ self.ext_map = {}
+
+ def finalize_options(self):
+ _build_ext.finalize_options(self)
+ self.extensions = self.extensions or []
+ self.check_extensions_list(self.extensions)
+ self.shlibs = [ext for ext in self.extensions
+ if isinstance(ext, Library)]
+ if self.shlibs:
+ self.setup_shlib_compiler()
+ for ext in self.extensions:
+ ext._full_name = self.get_ext_fullname(ext.name)
+ for ext in self.extensions:
+ fullname = ext._full_name
+ self.ext_map[fullname] = ext
+
+ # distutils 3.1 will also ask for module names
+ # XXX what to do with conflicts?
+ self.ext_map[fullname.split('.')[-1]] = ext
+
+ ltd = self.shlibs and self.links_to_dynamic(ext) or False
+ ns = ltd and use_stubs and not isinstance(ext, Library)
+ ext._links_to_dynamic = ltd
+ ext._needs_stub = ns
+ filename = ext._file_name = self.get_ext_filename(fullname)
+ libdir = os.path.dirname(os.path.join(self.build_lib, filename))
+ if ltd and libdir not in ext.library_dirs:
+ ext.library_dirs.append(libdir)
+ if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+ ext.runtime_library_dirs.append(os.curdir)
+
+ def setup_shlib_compiler(self):
+ compiler = self.shlib_compiler = new_compiler(
+ compiler=self.compiler, dry_run=self.dry_run, force=self.force
+ )
+ _customize_compiler_for_shlib(compiler)
+
+ if self.include_dirs is not None:
+ compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name, value) in self.define:
+ compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ compiler.set_link_objects(self.link_objects)
+
+ # hack so distutils' build_extension() builds a library instead
+ compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+ def get_export_symbols(self, ext):
+ if isinstance(ext, Library):
+ return ext.export_symbols
+ return _build_ext.get_export_symbols(self, ext)
+
+ def build_extension(self, ext):
+ ext._convert_pyx_sources_to_lang()
+ _compiler = self.compiler
+ try:
+ if isinstance(ext, Library):
+ self.compiler = self.shlib_compiler
+ _build_ext.build_extension(self, ext)
+ if ext._needs_stub:
+ cmd = self.get_finalized_command('build_py').build_lib
+ self.write_stub(cmd, ext)
+ finally:
+ self.compiler = _compiler
+
+ def links_to_dynamic(self, ext):
+ """Return true if 'ext' links to a dynamic lib in the same package"""
+ # XXX this should check to ensure the lib is actually being built
+ # XXX as dynamic, and not just using a locally-found version or a
+ # XXX static-compiled version
+ libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+ pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
+ return any(pkg + libname in libnames for libname in ext.libraries)
+
+ def get_outputs(self):
+ return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
+
+ def __get_stubs_outputs(self):
+ # assemble the base name for each extension that needs a stub
+ ns_ext_bases = (
+ os.path.join(self.build_lib, *ext._full_name.split('.'))
+ for ext in self.extensions
+ if ext._needs_stub
+ )
+ # pair each base with the extension
+ pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
+ return list(base + fnext for base, fnext in pairs)
+
+ def __get_output_extensions(self):
+ yield '.py'
+ yield '.pyc'
+ if self.get_finalized_command('build_py').optimize:
+ yield '.pyo'
+
+ def write_stub(self, output_dir, ext, compile=False):
+ log.info("writing stub loader for %s to %s", ext._full_name,
+ output_dir)
+ stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
+ '.py')
+ if compile and os.path.exists(stub_file):
+ raise DistutilsError(stub_file + " already exists! Please delete.")
+ if not self.dry_run:
+ f = open(stub_file, 'w')
+ f.write(
+ '\n'.join([
+ "def __bootstrap__():",
+ " global __bootstrap__, __file__, __loader__",
+ " import sys, os, pkg_resources, imp" + if_dl(", dl"),
+ " __file__ = pkg_resources.resource_filename"
+ "(__name__,%r)"
+ % os.path.basename(ext._file_name),
+ " del __bootstrap__",
+ " if '__loader__' in globals():",
+ " del __loader__",
+ if_dl(" old_flags = sys.getdlopenflags()"),
+ " old_dir = os.getcwd()",
+ " try:",
+ " os.chdir(os.path.dirname(__file__))",
+ if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
+ " imp.load_dynamic(__name__,__file__)",
+ " finally:",
+ if_dl(" sys.setdlopenflags(old_flags)"),
+ " os.chdir(old_dir)",
+ "__bootstrap__()",
+ "" # terminal \n
+ ])
+ )
+ f.close()
+ if compile:
+ from distutils.util import byte_compile
+
+ byte_compile([stub_file], optimize=0,
+ force=True, dry_run=self.dry_run)
+ optimize = self.get_finalized_command('install_lib').optimize
+ if optimize > 0:
+ byte_compile([stub_file], optimize=optimize,
+ force=True, dry_run=self.dry_run)
+ if os.path.exists(stub_file) and not self.dry_run:
+ os.unlink(stub_file)
+
+
+if use_stubs or os.name == 'nt':
+ # Build shared libraries
+ #
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ self.link(
+ self.SHARED_LIBRARY, objects, output_libname,
+ output_dir, libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug, extra_preargs, extra_postargs,
+ build_temp, target_lang
+ )
+else:
+ # Build static libraries everywhere else
+ libtype = 'static'
+
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ # XXX we need to either disallow these attrs on Library instances,
+ # or warn/abort here if set, or something...
+ # libraries=None, library_dirs=None, runtime_library_dirs=None,
+ # export_symbols=None, extra_preargs=None, extra_postargs=None,
+ # build_temp=None
+
+ assert output_dir is None # distutils build_ext doesn't pass this
+ output_dir, filename = os.path.split(output_libname)
+ basename, ext = os.path.splitext(filename)
+ if self.library_filename("x").startswith('lib'):
+ # strip 'lib' prefix; this is kludgy if some platform uses
+ # a different prefix
+ basename = basename[3:]
+
+ self.create_static_lib(
+ objects, basename, output_dir, debug, target_lang
+ )
+
+
+def _get_config_var_837(name):
+ """
+ In https://github.com/pypa/setuptools/pull/837, we discovered
+ Python 3.3.0 exposes the extension suffix under the name 'SO'.
+ """
+ if sys.version_info < (3, 3, 1):
+ name = 'SO'
+ return get_config_var(name)
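The abi3 renaming above is keyed off the `py_limited_api` extension flag; a minimal sketch of a `setup()` that triggers it on Python 3 (names are illustrative, and the Py_LIMITED_API value is one common choice):

    from setuptools import setup, Extension

    setup(
        name='spam',
        ext_modules=[
            # with py_limited_api=True, get_ext_filename() swaps the
            # interpreter-specific suffix for the .abi3 one where available
            Extension('spam', ['spam.c'], py_limited_api=True,
                      define_macros=[('Py_LIMITED_API', '0x03030000')]),
        ],
    )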
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
new file mode 100644
index 0000000..b0314fd
--- /dev/null
+++ b/setuptools/command/build_py.py
@@ -0,0 +1,270 @@
+from glob import glob
+from distutils.util import convert_path
+import distutils.command.build_py as orig
+import os
+import fnmatch
+import textwrap
+import io
+import distutils.errors
+import itertools
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map, filter, filterfalse
+
+try:
+ from setuptools.lib2to3_ex import Mixin2to3
+except ImportError:
+
+ class Mixin2to3:
+ def run_2to3(self, files, doctests=True):
+ "do nothing"
+
+
+class build_py(orig.build_py, Mixin2to3):
+ """Enhanced 'build_py' command that includes data files with packages
+
+ The data files are specified via a 'package_data' argument to 'setup()'.
+ See 'setuptools.dist.Distribution' for more details.
+
+ Also, this version of the 'build_py' command allows you to specify both
+ 'py_modules' and 'packages' in the same setup operation.
+ """
+
+ def finalize_options(self):
+ orig.build_py.finalize_options(self)
+ self.package_data = self.distribution.package_data
+ self.exclude_package_data = (self.distribution.exclude_package_data or
+ {})
+ if 'data_files' in self.__dict__:
+ del self.__dict__['data_files']
+ self.__updated_files = []
+ self.__doctests_2to3 = []
+
+ def run(self):
+ """Build modules, packages, and copy data files to build directory"""
+ if not self.py_modules and not self.packages:
+ return
+
+ if self.py_modules:
+ self.build_modules()
+
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ self.run_2to3(self.__updated_files, False)
+ self.run_2to3(self.__updated_files, True)
+ self.run_2to3(self.__doctests_2to3, True)
+
+ # Only compile actual .py files, using our base class' idea of what our
+ # output files are.
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
+
+ def __getattr__(self, attr):
+ "lazily compute data files"
+ if attr == 'data_files':
+ self.data_files = self._get_data_files()
+ return self.data_files
+ return orig.build_py.__getattr__(self, attr)
+
+ def build_module(self, module, module_file, package):
+ if six.PY2 and isinstance(package, six.string_types):
+ # avoid errors on Python 2 when unicode is passed (#190)
+ package = package.split('.')
+ outfile, copied = orig.build_py.build_module(self, module, module_file,
+ package)
+ if copied:
+ self.__updated_files.append(outfile)
+ return outfile, copied
+
+ def _get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+ self.analyze_manifest()
+ return list(map(self._get_pkg_data_files, self.packages or ()))
+
+ def _get_pkg_data_files(self, package):
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Strip directory from globbed filenames
+ filenames = [
+ os.path.relpath(file, src_dir)
+ for file in self.find_data_files(package, src_dir)
+ ]
+ return package, src_dir, build_dir, filenames
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'"""
+ patterns = self._get_platform_patterns(
+ self.package_data,
+ package,
+ src_dir,
+ )
+ globs_expanded = map(glob, patterns)
+ # flatten the expanded globs into an iterable of matches
+ globs_matches = itertools.chain.from_iterable(globs_expanded)
+ glob_files = filter(os.path.isfile, globs_matches)
+ files = itertools.chain(
+ self.manifest_files.get(package, []),
+ glob_files,
+ )
+ return self.exclude_data_files(package, src_dir, files)
+
+ def build_package_data(self):
+ """Copy data files into build directory"""
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ self.mkpath(os.path.dirname(target))
+ srcfile = os.path.join(src_dir, filename)
+ outf, copied = self.copy_file(srcfile, target)
+ srcfile = os.path.abspath(srcfile)
+ if (copied and
+ srcfile in self.distribution.convert_2to3_doctests):
+ self.__doctests_2to3.append(outf)
+
+ def analyze_manifest(self):
+ self.manifest_files = mf = {}
+ if not self.distribution.include_package_data:
+ return
+ src_dirs = {}
+ for package in self.packages or ():
+ # Locate package source directory
+ src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ for path in ei_cmd.filelist.files:
+ d, f = os.path.split(assert_relative(path))
+ prev = None
+ oldf = f
+ while d and d != prev and d not in src_dirs:
+ prev = d
+ d, df = os.path.split(d)
+ f = os.path.join(df, f)
+ if d in src_dirs:
+ if path.endswith('.py') and f == oldf:
+ continue # it's a module, not data
+ mf.setdefault(src_dirs[d], []).append(path)
+
+ def get_data_files(self):
+ pass # Lazily compute data files in _get_data_files() function.
+
+ def check_package(self, package, package_dir):
+ """Check namespace packages' __init__ for declare_namespace"""
+ try:
+ return self.packages_checked[package]
+ except KeyError:
+ pass
+
+ init_py = orig.build_py.check_package(self, package, package_dir)
+ self.packages_checked[package] = init_py
+
+ if not init_py or not self.distribution.namespace_packages:
+ return init_py
+
+ for pkg in self.distribution.namespace_packages:
+ if pkg == package or pkg.startswith(package + '.'):
+ break
+ else:
+ return init_py
+
+ with io.open(init_py, 'rb') as f:
+ contents = f.read()
+ if b'declare_namespace' not in contents:
+ raise distutils.errors.DistutilsError(
+ "Namespace package problem: %s is a namespace package, but "
+ "its\n__init__.py does not call declare_namespace()! Please "
+ 'fix it.\n(See the setuptools manual under '
+ '"Namespace Packages" for details.)\n' % (package,)
+ )
+ return init_py
+
+ def initialize_options(self):
+ self.packages_checked = {}
+ orig.build_py.initialize_options(self)
+
+ def get_package_dir(self, package):
+ res = orig.build_py.get_package_dir(self, package)
+ if self.distribution.src_root is not None:
+ return os.path.join(self.distribution.src_root, res)
+ return res
+
+ def exclude_data_files(self, package, src_dir, files):
+ """Filter filenames for package's data files in 'src_dir'"""
+ files = list(files)
+ patterns = self._get_platform_patterns(
+ self.exclude_package_data,
+ package,
+ src_dir,
+ )
+ match_groups = (
+ fnmatch.filter(files, pattern)
+ for pattern in patterns
+ )
+ # flatten the groups of matches into an iterable of matches
+ matches = itertools.chain.from_iterable(match_groups)
+ bad = set(matches)
+ keepers = (
+ fn
+ for fn in files
+ if fn not in bad
+ )
+ # ditch dupes
+ return list(_unique_everseen(keepers))
+
+ @staticmethod
+ def _get_platform_patterns(spec, package, src_dir):
+ """
+ yield platform-specific path patterns (suitable for glob
+ or fn_match) from a glob-based spec (such as
+ self.package_data or self.exclude_package_data)
+ matching package in src_dir.
+ """
+ raw_patterns = itertools.chain(
+ spec.get('', []),
+ spec.get(package, []),
+ )
+ return (
+ # Each pattern has to be converted to a platform-specific path
+ os.path.join(src_dir, convert_path(pattern))
+ for pattern in raw_patterns
+ )
+
+
+# from Python docs
+def _unique_everseen(iterable, key=None):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
+
+
+def assert_relative(path):
+ if not os.path.isabs(path):
+ return path
+ from distutils.errors import DistutilsSetupError
+
+ msg = textwrap.dedent("""
+ Error: setup script specifies an absolute path:
+
+ %s
+
+ setup() arguments must *always* be /-separated paths relative to the
+ setup.py directory, *never* absolute paths.
+ """).lstrip() % path
+ raise DistutilsSetupError(msg)
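A sketch of how the pattern lookup in `_get_platform_patterns()` plays out in a `setup()` call; the '' key applies to every package, and excludes are fnmatch-filtered after globbing (names are illustrative):

    from setuptools import setup, find_packages

    setup(
        name='example',
        packages=find_packages(),
        package_data={
            '': ['*.txt'],                # '' key: matched in every package
            'example': ['data/*.json'],
        },
        exclude_package_data={
            'example': ['data/secret_*.json'],  # dropped from the globbed set
        },
    )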
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
new file mode 100755
index 0000000..959c932
--- /dev/null
+++ b/setuptools/command/develop.py
@@ -0,0 +1,216 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsError, DistutilsOptionError
+import os
+import glob
+import io
+
+from setuptools.extern import six
+
+from pkg_resources import Distribution, PathMetadata, normalize_path
+from setuptools.command.easy_install import easy_install
+from setuptools import namespaces
+import setuptools
+
+
+class develop(namespaces.DevelopInstaller, easy_install):
+ """Set up package for development"""
+
+ description = "install package in 'development mode'"
+
+ user_options = easy_install.user_options + [
+ ("uninstall", "u", "Uninstall this source package"),
+ ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+ ]
+
+ boolean_options = easy_install.boolean_options + ['uninstall']
+
+ command_consumes_arguments = False # override base
+
+ def run(self):
+ if self.uninstall:
+ self.multi_version = True
+ self.uninstall_link()
+ self.uninstall_namespaces()
+ else:
+ self.install_for_development()
+ self.warn_deprecated_options()
+
+ def initialize_options(self):
+ self.uninstall = None
+ self.egg_path = None
+ easy_install.initialize_options(self)
+ self.setup_path = None
+ self.always_copy_from = '.' # always copy eggs installed in curdir
+
+ def finalize_options(self):
+ ei = self.get_finalized_command("egg_info")
+ if ei.broken_egg_info:
+ template = "Please rename %r to %r before using 'develop'"
+ args = ei.egg_info, ei.broken_egg_info
+ raise DistutilsError(template % args)
+ self.args = [ei.egg_name]
+
+ easy_install.finalize_options(self)
+ self.expand_basedirs()
+ self.expand_dirs()
+ # pick up setup-dir .egg files only: no .egg-info
+ self.package_index.scan(glob.glob('*.egg'))
+
+ egg_link_fn = ei.egg_name + '.egg-link'
+ self.egg_link = os.path.join(self.install_dir, egg_link_fn)
+ self.egg_base = ei.egg_base
+ if self.egg_path is None:
+ self.egg_path = os.path.abspath(ei.egg_base)
+
+ target = normalize_path(self.egg_base)
+ egg_path = normalize_path(os.path.join(self.install_dir,
+ self.egg_path))
+ if egg_path != target:
+ raise DistutilsOptionError(
+ "--egg-path must be a relative path from the install"
+ " directory to " + target
+ )
+
+ # Make a distribution for the package's source
+ self.dist = Distribution(
+ target,
+ PathMetadata(target, os.path.abspath(ei.egg_info)),
+ project_name=ei.egg_name
+ )
+
+ self.setup_path = self._resolve_setup_path(
+ self.egg_base,
+ self.install_dir,
+ self.egg_path,
+ )
+
+ @staticmethod
+ def _resolve_setup_path(egg_base, install_dir, egg_path):
+ """
+ Generate a path from egg_base back to '.' where the
+ setup script resides and ensure that path points to the
+ setup path from $install_dir/$egg_path.
+ """
+ path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
+ if path_to_setup != os.curdir:
+ path_to_setup = '../' * (path_to_setup.count('/') + 1)
+ resolved = normalize_path(
+ os.path.join(install_dir, egg_path, path_to_setup)
+ )
+ if resolved != normalize_path(os.curdir):
+ raise DistutilsOptionError(
+ "Can't get a consistent path to setup script from"
+ " installation directory", resolved, normalize_path(os.curdir))
+ return path_to_setup
+
+ def install_for_development(self):
+ if six.PY3 and getattr(self.distribution, 'use_2to3', False):
+ # If we run 2to3 we can not do this inplace:
+
+ # Ensure metadata is up-to-date
+ self.reinitialize_command('build_py', inplace=0)
+ self.run_command('build_py')
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = normalize_path(bpy_cmd.build_lib)
+
+ # Build extensions
+ self.reinitialize_command('egg_info', egg_base=build_path)
+ self.run_command('egg_info')
+
+ self.reinitialize_command('build_ext', inplace=0)
+ self.run_command('build_ext')
+
+ # Fixup egg-link and easy-install.pth
+ ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_path = build_path
+ self.dist.location = build_path
+ # XXX
+ self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
+ else:
+ # Without 2to3 inplace works fine:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ self.install_site_py() # ensure that target dir is site-safe
+ if setuptools.bootstrap_install_from:
+ self.easy_install(setuptools.bootstrap_install_from)
+ setuptools.bootstrap_install_from = None
+
+ self.install_namespaces()
+
+ # create an .egg-link in the installation dir, pointing to our egg
+ log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+ if not self.dry_run:
+ with open(self.egg_link, "w") as f:
+ f.write(self.egg_path + "\n" + self.setup_path)
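+ # The .egg-link written above holds two lines, e.g. (illustrative):
+ # /home/user/project
+ # .
+ # uninstall_link() compares exactly these lines before removing it.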
+ # postprocess the installed distro, fixing up .pth, installing scripts,
+ # and handling requirements
+ self.process_distribution(None, self.dist, not self.no_deps)
+
+ def uninstall_link(self):
+ if os.path.exists(self.egg_link):
+ log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+ egg_link_file = open(self.egg_link)
+ contents = [line.rstrip() for line in egg_link_file]
+ egg_link_file.close()
+ if contents not in ([self.egg_path],
+ [self.egg_path, self.setup_path]):
+ log.warn("Link points to %s: uninstall aborted", contents)
+ return
+ if not self.dry_run:
+ os.unlink(self.egg_link)
+ if not self.dry_run:
+ self.update_pth(self.dist) # remove any .pth link to us
+ if self.distribution.scripts:
+ # XXX should also check for entry point scripts!
+ log.warn("Note: you must uninstall or replace scripts manually!")
+
+ def install_egg_scripts(self, dist):
+ if dist is not self.dist:
+ # Installing a dependency, so fall back to normal behavior
+ return easy_install.install_egg_scripts(self, dist)
+
+ # create wrapper scripts in the script dir, pointing to dist.scripts
+
+ # new-style...
+ self.install_wrapper_scripts(dist)
+
+ # ...and old-style
+ for script_name in self.distribution.scripts or []:
+ script_path = os.path.abspath(convert_path(script_name))
+ script_name = os.path.basename(script_path)
+ with io.open(script_path) as strm:
+ script_text = strm.read()
+ self.install_script(dist, script_name, script_text, script_path)
+
+ def install_wrapper_scripts(self, dist):
+ dist = VersionlessRequirement(dist)
+ return easy_install.install_wrapper_scripts(self, dist)
+
+
+class VersionlessRequirement(object):
+ """
+ Adapt a pkg_resources.Distribution to simply return the project
+ name as the 'requirement' so that scripts will work across
+ multiple versions.
+
+ >>> dist = Distribution(project_name='foo', version='1.0')
+ >>> str(dist.as_requirement())
+ 'foo==1.0'
+ >>> adapted_dist = VersionlessRequirement(dist)
+ >>> str(adapted_dist.as_requirement())
+ 'foo'
+ """
+
+ def __init__(self, dist):
+ self.__dist = dist
+
+ def __getattr__(self, name):
+ return getattr(self.__dist, name)
+
+ def as_requirement(self):
+ return self.project_name
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
new file mode 100644
index 0000000..c45258f
--- /dev/null
+++ b/setuptools/command/dist_info.py
@@ -0,0 +1,36 @@
+"""
+Create a dist_info directory, as defined in the wheel specification.
+"""
+
+import os
+
+from distutils.core import Command
+from distutils import log
+
+
+class dist_info(Command):
+
+ description = 'create a .dist-info directory'
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"),
+ ]
+
+ def initialize_options(self):
+ self.egg_base = None
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ egg_info = self.get_finalized_command('egg_info')
+ egg_info.egg_base = self.egg_base
+ egg_info.finalize_options()
+ egg_info.run()
+ dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info'
+ log.info("creating '{}'".format(os.path.abspath(dist_info_dir)))
+
+ bdist_wheel = self.get_finalized_command('bdist_wheel')
+ bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir)
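+ # Example invocation (illustrative; assumes a setup.py-based project):
+ # python setup.py dist_info --egg-base build
+ # runs egg_info into build/ and converts the resulting .egg-info
+ # directory into a .dist-info directory via bdist_wheel's egg2dist.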
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
new file mode 100755
index 0000000..85ee40f
--- /dev/null
+++ b/setuptools/command/easy_install.py
@@ -0,0 +1,2334 @@
+#!/usr/bin/env python
+"""
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages. For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ https://setuptools.readthedocs.io/en/latest/easy_install.html
+
+"""
+
+from glob import glob
+from distutils.util import get_platform
+from distutils.util import convert_path, subst_vars
+from distutils.errors import (
+ DistutilsArgError, DistutilsOptionError,
+ DistutilsError, DistutilsPlatformError,
+)
+from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
+from distutils import log, dir_util
+from distutils.command.build_scripts import first_line_re
+from distutils.spawn import find_executable
+import sys
+import os
+import zipimport
+import shutil
+import tempfile
+import zipfile
+import re
+import stat
+import random
+import textwrap
+import warnings
+import site
+import struct
+import contextlib
+import subprocess
+import shlex
+import io
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import configparser, map
+
+from setuptools import Command
+from setuptools.sandbox import run_setup
+from setuptools.py31compat import get_path, get_config_vars
+from setuptools.py27compat import rmtree_safe
+from setuptools.command import setopt
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import (
+ PackageIndex, parse_requirement_arg, URL_SCHEME,
+)
+from setuptools.command import bdist_egg, egg_info
+from setuptools.wheel import Wheel
+from pkg_resources import (
+ yield_lines, normalize_path, resource_string, ensure_directory,
+ get_distribution, find_distributions, Environment, Requirement,
+ Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
+ VersionConflict, DEVELOP_DIST,
+)
+import pkg_resources.py31compat
+
+# Turn on PEP440Warnings
+warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
+
+__all__ = [
+ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+ 'main', 'get_exe_prefixes',
+]
+
+
+def is_64bit():
+ return struct.calcsize("P") == 8
+
+
+def samefile(p1, p2):
+ """
+ Determine if two paths reference the same file.
+
+ Augments os.path.samefile to work on Windows and
+ suppresses errors if the path doesn't exist.
+ """
+ both_exist = os.path.exists(p1) and os.path.exists(p2)
+ use_samefile = hasattr(os.path, 'samefile') and both_exist
+ if use_samefile:
+ return os.path.samefile(p1, p2)
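+ # Fall back to comparing normalized strings; e.g. on Windows,
+ # 'C:\x\A.py' and 'c:\X\a.PY' both normcase to 'c:\x\a.py'.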
+ norm_p1 = os.path.normpath(os.path.normcase(p1))
+ norm_p2 = os.path.normpath(os.path.normcase(p2))
+ return norm_p1 == norm_p2
+
+
+if six.PY2:
+
+ def _to_ascii(s):
+ return s
+
+ def isascii(s):
+ try:
+ six.text_type(s, 'ascii')
+ return True
+ except UnicodeError:
+ return False
+else:
+
+ def _to_ascii(s):
+ return s.encode('ascii')
+
+ def isascii(s):
+ try:
+ s.encode('ascii')
+ return True
+ except UnicodeError:
+ return False
+
+
+_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
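+# e.g. _one_liner("""
+#     import os
+#     os.unlink(p)
+# """) == 'import os; os.unlink(p)'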
+
+
+class easy_install(Command):
+ """Manage a download/build/install process"""
+ description = "Find/get/install Python packages"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('prefix=', None, "installation prefix"),
+ ("zip-ok", "z", "install package as a zipfile"),
+ ("multi-version", "m", "make apps have to require() a version"),
+ ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+ ("install-dir=", "d", "install package to DIR"),
+ ("script-dir=", "s", "install scripts to DIR"),
+ ("exclude-scripts", "x", "Don't install scripts"),
+ ("always-copy", "a", "Copy all needed packages to install dir"),
+ ("index-url=", "i", "base URL of Python Package Index"),
+ ("find-links=", "f", "additional URL(s) to search for packages"),
+ ("build-directory=", "b",
+ "download/extract/build in DIR; keep the results"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('record=', None,
+ "filename in which to record list of installed files"),
+ ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+ ('site-dirs=', 'S', "list of directories where .pth files work"),
+ ('editable', 'e', "Install specified packages in editable form"),
+ ('no-deps', 'N', "don't install dependencies"),
+ ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+ ('local-snapshots-ok', 'l',
+ "allow building eggs from local checkouts"),
+ ('version', None, "print version information and exit"),
+ ('no-find-links', None,
+ "Don't load find-links defined in packages being installed")
+ ]
+ boolean_options = [
+ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+ 'editable',
+ 'no-deps', 'local-snapshots-ok', 'version'
+ ]
+
+ if site.ENABLE_USER_SITE:
+ help_msg = "install in user site-package '%s'" % site.USER_SITE
+ user_options.append(('user', None, help_msg))
+ boolean_options.append('user')
+
+ negative_opt = {'always-unzip': 'zip-ok'}
+ create_index = PackageIndex
+
+ def initialize_options(self):
+ # the --user option seems to be an opt-in one,
+ # so the default should be False.
+ self.user = 0
+ self.zip_ok = self.local_snapshots_ok = None
+ self.install_dir = self.script_dir = self.exclude_scripts = None
+ self.index_url = None
+ self.find_links = None
+ self.build_directory = None
+ self.args = None
+ self.optimize = self.record = None
+ self.upgrade = self.always_copy = self.multi_version = None
+ self.editable = self.no_deps = self.allow_hosts = None
+ self.root = self.prefix = self.no_report = None
+ self.version = None
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_base = None
+ self.install_platbase = None
+ if site.ENABLE_USER_SITE:
+ self.install_userbase = site.USER_BASE
+ self.install_usersite = site.USER_SITE
+ else:
+ self.install_userbase = None
+ self.install_usersite = None
+ self.no_find_links = None
+
+ # Options not specifiable via command line
+ self.package_index = None
+ self.pth_file = self.always_copy_from = None
+ self.site_dirs = None
+ self.installed_projects = {}
+ self.sitepy_installed = False
+ # Always read easy_install options, even if we are subclassed, or have
+ # an independent instance created. This ensures that defaults will
+ # always come from the standard configuration file(s)' "easy_install"
+ # section, even if this is a "develop" or "install" command, or some
+ # other embedding.
+ self._dry_run = None
+ self.verbose = self.distribution.verbose
+ self.distribution._set_command_options(
+ self, self.distribution.get_option_dict('easy_install')
+ )
+
+ def delete_blockers(self, blockers):
+ extant_blockers = (
+ filename for filename in blockers
+ if os.path.exists(filename) or os.path.islink(filename)
+ )
+ list(map(self._delete_path, extant_blockers))
+
+ def _delete_path(self, path):
+ log.info("Deleting %s", path)
+ if self.dry_run:
+ return
+
+ is_tree = os.path.isdir(path) and not os.path.islink(path)
+ remover = rmtree if is_tree else os.unlink
+ remover(path)
+
+ @staticmethod
+ def _render_version():
+ """
+ Render the Setuptools version and installation details, then exit.
+ """
+ ver = sys.version[:3]
+ dist = get_distribution('setuptools')
+ tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
+ print(tmpl.format(**locals()))
+ raise SystemExit()
+
+ def finalize_options(self):
+ self.version and self._render_version()
+
+ py_version = sys.version.split()[0]
+ prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
+
+ self.config_vars = {
+ 'dist_name': self.distribution.get_name(),
+ 'dist_version': self.distribution.get_version(),
+ 'dist_fullname': self.distribution.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': py_version[0:3],
+ 'py_version_nodot': py_version[0] + py_version[2],
+ 'sys_prefix': prefix,
+ 'prefix': prefix,
+ 'sys_exec_prefix': exec_prefix,
+ 'exec_prefix': exec_prefix,
+ # Only Python 3.2+ has abiflags
+ 'abiflags': getattr(sys, 'abiflags', ''),
+ }
+
+ if site.ENABLE_USER_SITE:
+ self.config_vars['userbase'] = self.install_userbase
+ self.config_vars['usersite'] = self.install_usersite
+
+ self._fix_install_dir_for_user_site()
+
+ self.expand_basedirs()
+ self.expand_dirs()
+
+ self._expand(
+ 'install_dir', 'script_dir', 'build_directory',
+ 'site_dirs',
+ )
+ # If a non-default installation directory was specified, default the
+ # script directory to match it.
+ if self.script_dir is None:
+ self.script_dir = self.install_dir
+
+ if self.no_find_links is None:
+ self.no_find_links = False
+
+ # Let install_dir get set by install_lib command, which in turn
+ # gets its info from the install command, and takes into account
+ # --prefix and --home and all that other crud.
+ self.set_undefined_options(
+ 'install_lib', ('install_dir', 'install_dir')
+ )
+ # Likewise, set default script_dir from 'install_scripts.install_dir'
+ self.set_undefined_options(
+ 'install_scripts', ('install_dir', 'script_dir')
+ )
+
+ if self.user and self.install_purelib:
+ self.install_dir = self.install_purelib
+ self.script_dir = self.install_scripts
+ # default --record from the install command
+ self.set_undefined_options('install', ('record', 'record'))
+ # Used only by the --site-dirs validation below. Materialize the
+ # mapping as a list so membership can be tested repeatedly; a bare
+ # iterator would be exhausted after the first check.
+ normpath = list(map(normalize_path, sys.path))
+ self.all_site_dirs = get_site_dirs()
+ if self.site_dirs is not None:
+ site_dirs = [
+ os.path.expanduser(s.strip()) for s in
+ self.site_dirs.split(',')
+ ]
+ for d in site_dirs:
+ if not os.path.isdir(d):
+ log.warn("%s (in --site-dirs) does not exist", d)
+ elif normalize_path(d) not in normpath:
+ raise DistutilsOptionError(
+ d + " (in --site-dirs) is not on sys.path"
+ )
+ else:
+ self.all_site_dirs.append(normalize_path(d))
+ if not self.editable:
+ self.check_site_dir()
+ self.index_url = self.index_url or "https://pypi.org/simple/"
+ self.shadow_path = self.all_site_dirs[:]
+ for path_item in self.install_dir, normalize_path(self.script_dir):
+ if path_item not in self.shadow_path:
+ self.shadow_path.insert(0, path_item)
+
+ if self.allow_hosts is not None:
+ hosts = [s.strip() for s in self.allow_hosts.split(',')]
+ else:
+ hosts = ['*']
+ if self.package_index is None:
+ self.package_index = self.create_index(
+ self.index_url, search_path=self.shadow_path, hosts=hosts,
+ )
+ self.local_index = Environment(self.shadow_path + sys.path)
+
+ if self.find_links is not None:
+ if isinstance(self.find_links, six.string_types):
+ self.find_links = self.find_links.split()
+ else:
+ self.find_links = []
+ if self.local_snapshots_ok:
+ self.package_index.scan_egg_links(self.shadow_path + sys.path)
+ if not self.no_find_links:
+ self.package_index.add_find_links(self.find_links)
+ self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ if not (0 <= self.optimize <= 2):
+ raise ValueError
+ except ValueError:
+ raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+ if self.editable and not self.build_directory:
+ raise DistutilsArgError(
+ "Must specify a build directory (-b) when using --editable"
+ )
+ if not self.args:
+ raise DistutilsArgError(
+ "No urls, filenames, or requirements specified (see --help)")
+
+ self.outputs = []
+
+ def _fix_install_dir_for_user_site(self):
+ """
+ Fix the install_dir if "--user" was used.
+ """
+ if not self.user or not site.ENABLE_USER_SITE:
+ return
+
+ self.create_home_path()
+ if self.install_userbase is None:
+ msg = "User base directory is not specified"
+ raise DistutilsPlatformError(msg)
+ self.install_base = self.install_platbase = self.install_userbase
+ scheme_name = os.name.replace('posix', 'unix') + '_user'
+ self.select_scheme(scheme_name)
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ if os.name == 'posix' or os.name == 'nt':
+ val = os.path.expanduser(val)
+ val = subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Calls `os.path.expanduser` on install_base, install_platbase and
+ root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Calls `os.path.expanduser` on install dirs."""
+ dirs = [
+ 'install_purelib',
+ 'install_platlib',
+ 'install_lib',
+ 'install_headers',
+ 'install_scripts',
+ 'install_data',
+ ]
+ self._expand_attrs(dirs)
+
+ def run(self):
+ if self.verbose != self.distribution.verbose:
+ log.set_verbosity(self.verbose)
+ try:
+ for spec in self.args:
+ self.easy_install(spec, not self.no_deps)
+ if self.record:
+ outputs = self.outputs
+ if self.root: # strip any package prefix
+ root_len = len(self.root)
+ for counter in range(len(outputs)):
+ outputs[counter] = outputs[counter][root_len:]
+ from distutils import file_util
+
+ self.execute(
+ file_util.write_file, (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record
+ )
+ self.warn_deprecated_options()
+ finally:
+ log.set_verbosity(self.distribution.verbose)
+
+ def pseudo_tempname(self):
+ """Return a pseudo-tempname base in the install directory.
+ This code is intentionally naive; if a malicious party can write to
+ the target directory you're already in deep doodoo.
+ """
+ try:
+ pid = os.getpid()
+ except Exception:
+ pid = random.randint(0, sys.maxsize)
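+ # e.g. '<install_dir>/test-easy-install-4242' for pid 4242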
+ return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+ def warn_deprecated_options(self):
+ pass
+
+ def check_site_dir(self):
+ """Verify that self.install_dir is .pth-capable dir, if needed"""
+
+ instdir = normalize_path(self.install_dir)
+ pth_file = os.path.join(instdir, 'easy-install.pth')
+
+ # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+ is_site_dir = instdir in self.all_site_dirs
+
+ if not is_site_dir and not self.multi_version:
+ # No? Then directly test whether it does .pth file processing
+ is_site_dir = self.check_pth_processing()
+ else:
+ # make sure we can write to target dir
+ testfile = self.pseudo_tempname() + '.write-test'
+ test_exists = os.path.exists(testfile)
+ try:
+ if test_exists:
+ os.unlink(testfile)
+ open(testfile, 'w').close()
+ os.unlink(testfile)
+ except (OSError, IOError):
+ self.cant_write_to_target()
+
+ if not is_site_dir and not self.multi_version:
+ # Can't install non-multi to non-site dir
+ raise DistutilsError(self.no_default_version_msg())
+
+ if is_site_dir:
+ if self.pth_file is None:
+ self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
+ else:
+ self.pth_file = None
+
+ if instdir not in map(normalize_path, _pythonpath()):
+ # only PYTHONPATH dirs need a site.py, so pretend it's there
+ self.sitepy_installed = True
+ elif self.multi_version and not os.path.exists(pth_file):
+ self.sitepy_installed = True # don't need site.py in this case
+ self.pth_file = None # and don't create a .pth file
+ self.install_dir = instdir
+
+ __cant_write_msg = textwrap.dedent("""
+ can't create or remove files in install directory
+
+ The following error occurred while trying to add or remove files in the
+ installation directory:
+
+ %s
+
+ The installation directory you specified (via --install-dir, --prefix, or
+ the distutils default setting) was:
+
+ %s
+ """).lstrip()
+
+ __not_exists_id = textwrap.dedent("""
+ This directory does not currently exist. Please create it and try again, or
+ choose a different installation directory (using the -d or --install-dir
+ option).
+ """).lstrip()
+
+ __access_msg = textwrap.dedent("""
+ Perhaps your account does not have write access to this directory? If the
+ installation directory is a system-owned directory, you may need to sign in
+ as the administrator or "root" account. If you do not have administrative
+ access to this machine, you may wish to choose a different installation
+ directory, preferably one that is listed in your PYTHONPATH environment
+ variable.
+
+ For information on other options, you may wish to consult the
+ documentation at:
+
+ https://setuptools.readthedocs.io/en/latest/easy_install.html
+
+ Please make the appropriate changes for your system and try again.
+ """).lstrip()
+
+ def cant_write_to_target(self):
+ msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
+
+ if not os.path.exists(self.install_dir):
+ msg += '\n' + self.__not_exists_id
+ else:
+ msg += '\n' + self.__access_msg
+ raise DistutilsError(msg)
+
+ def check_pth_processing(self):
+ """Empirically verify whether .pth files are supported in inst. dir"""
+ instdir = self.install_dir
+ log.info("Checking .pth file support in %s", instdir)
+ pth_file = self.pseudo_tempname() + ".pth"
+ ok_file = pth_file + '.ok'
+ ok_exists = os.path.exists(ok_file)
+ tmpl = _one_liner("""
+ import os
+ f = open({ok_file!r}, 'w')
+ f.write('OK')
+ f.close()
+ """) + '\n'
+ try:
+ if ok_exists:
+ os.unlink(ok_file)
+ dirname = os.path.dirname(ok_file)
+ pkg_resources.py31compat.makedirs(dirname, exist_ok=True)
+ f = open(pth_file, 'w')
+ except (OSError, IOError):
+ self.cant_write_to_target()
+ else:
+ try:
+ f.write(tmpl.format(**locals()))
+ f.close()
+ f = None
+ executable = sys.executable
+ if os.name == 'nt':
+ dirname, basename = os.path.split(executable)
+ alt = os.path.join(dirname, 'pythonw.exe')
+ use_alt = (
+ basename.lower() == 'python.exe' and
+ os.path.exists(alt)
+ )
+ if use_alt:
+ # use pythonw.exe to avoid opening a console window
+ executable = alt
+
+ from distutils.spawn import spawn
+
+ spawn([executable, '-E', '-c', 'pass'], 0)
+
+ if os.path.exists(ok_file):
+ log.info(
+ "TEST PASSED: %s appears to support .pth files",
+ instdir
+ )
+ return True
+ finally:
+ if f:
+ f.close()
+ if os.path.exists(ok_file):
+ os.unlink(ok_file)
+ if os.path.exists(pth_file):
+ os.unlink(pth_file)
+ if not self.multi_version:
+ log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+ return False
+
+ def install_egg_scripts(self, dist):
+ """Write all the scripts for `dist`, unless scripts are excluded"""
+ if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+ for script_name in dist.metadata_listdir('scripts'):
+ if dist.metadata_isdir('scripts/' + script_name):
+ # The "script" is a directory, likely a Python 3
+ # __pycache__ directory, so skip it.
+ continue
+ self.install_script(
+ dist, script_name,
+ dist.get_metadata('scripts/' + script_name)
+ )
+ self.install_wrapper_scripts(dist)
+
+ def add_output(self, path):
+ if os.path.isdir(path):
+ for base, dirs, files in os.walk(path):
+ for filename in files:
+ self.outputs.append(os.path.join(base, filename))
+ else:
+ self.outputs.append(path)
+
+ def not_editable(self, spec):
+ if self.editable:
+ raise DistutilsArgError(
+ "Invalid argument %r: you can't use filenames or URLs "
+ "with --editable (except via the --find-links option)."
+ % (spec,)
+ )
+
+ def check_editable(self, spec):
+ if not self.editable:
+ return
+
+ if os.path.exists(os.path.join(self.build_directory, spec.key)):
+ raise DistutilsArgError(
+ "%r already exists in %s; can't do a checkout there" %
+ (spec.key, self.build_directory)
+ )
+
+ @contextlib.contextmanager
+ def _tmpdir(self):
+ tmpdir = tempfile.mkdtemp(prefix=six.u("easy_install-"))
+ try:
+ # cast to str as workaround for #709 and #710 and #712
+ yield str(tmpdir)
+ finally:
+ os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir))
+
+ def easy_install(self, spec, deps=False):
+ if not self.editable:
+ self.install_site_py()
+
+ with self._tmpdir() as tmpdir:
+ if not isinstance(spec, Requirement):
+ if URL_SCHEME(spec):
+ # It's a url, download it to tmpdir and process
+ self.not_editable(spec)
+ dl = self.package_index.download(spec, tmpdir)
+ return self.install_item(None, dl, tmpdir, deps, True)
+
+ elif os.path.exists(spec):
+ # Existing file or directory, just process it directly
+ self.not_editable(spec)
+ return self.install_item(None, spec, tmpdir, deps, True)
+ else:
+ spec = parse_requirement_arg(spec)
+
+ self.check_editable(spec)
+ dist = self.package_index.fetch_distribution(
+ spec, tmpdir, self.upgrade, self.editable,
+ not self.always_copy, self.local_index
+ )
+ if dist is None:
+ msg = "Could not find suitable distribution for %r" % spec
+ if self.always_copy:
+ msg += " (--always-copy skips system and development eggs)"
+ raise DistutilsError(msg)
+ elif dist.precedence == DEVELOP_DIST:
+ # .egg-info dists don't need installing, just process deps
+ self.process_distribution(spec, dist, deps, "Using")
+ return dist
+ else:
+ return self.install_item(spec, dist.location, tmpdir, deps)
+
+ def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+
+ # Installation is also needed if the file is in tmpdir or is not an egg
+ install_needed = install_needed or self.always_copy
+ install_needed = install_needed or os.path.dirname(download) == tmpdir
+ install_needed = install_needed or not download.endswith('.egg')
+ install_needed = install_needed or (
+ self.always_copy_from is not None and
+ os.path.dirname(normalize_path(download)) ==
+ normalize_path(self.always_copy_from)
+ )
+
+ if spec and not install_needed:
+ # at this point, we know it's a local .egg; we just don't know
+ # whether it's already installed.
+ for dist in self.local_index[spec.project_name]:
+ if dist.location == download:
+ break
+ else:
+ install_needed = True # it's not in the local index
+
+ log.info("Processing %s", os.path.basename(download))
+
+ if install_needed:
+ dists = self.install_eggs(spec, download, tmpdir)
+ for dist in dists:
+ self.process_distribution(spec, dist, deps)
+ else:
+ dists = [self.egg_distribution(download)]
+ self.process_distribution(spec, dists[0], deps, "Using")
+
+ if spec is not None:
+ for dist in dists:
+ if dist in spec:
+ return dist
+
+ def select_scheme(self, name):
+ """Sets the install directories by applying the install schemes."""
+ # it's the caller's problem if they supply a bad name!
+ scheme = INSTALL_SCHEMES[name]
+ for key in SCHEME_KEYS:
+ attrname = 'install_' + key
+ if getattr(self, attrname) is None:
+ setattr(self, attrname, scheme[key])
+
+ def process_distribution(self, requirement, dist, deps=True, *info):
+ self.update_pth(dist)
+ self.package_index.add(dist)
+ if dist in self.local_index[dist.key]:
+ self.local_index.remove(dist)
+ self.local_index.add(dist)
+ self.install_egg_scripts(dist)
+ self.installed_projects[dist.key] = dist
+ log.info(self.installation_report(requirement, dist, *info))
+ if (dist.has_metadata('dependency_links.txt') and
+ not self.no_find_links):
+ self.package_index.add_find_links(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
+ if not deps and not self.always_copy:
+ return
+ elif requirement is not None and dist.key != requirement.key:
+ log.warn("Skipping dependencies for %s", dist)
+ return # XXX this is not the distribution we were looking for
+ elif requirement is None or dist not in requirement:
+ # if we wound up with a different version, resolve what we've got
+ distreq = dist.as_requirement()
+ requirement = Requirement(str(distreq))
+ log.info("Processing dependencies for %s", requirement)
+ try:
+ distros = WorkingSet([]).resolve(
+ [requirement], self.local_index, self.easy_install
+ )
+ except DistributionNotFound as e:
+ raise DistutilsError(str(e))
+ except VersionConflict as e:
+ raise DistutilsError(e.report())
+ if self.always_copy or self.always_copy_from:
+ # Force all the relevant distros to be copied or activated
+ for dist in distros:
+ if dist.key not in self.installed_projects:
+ self.easy_install(dist.as_requirement())
+ log.info("Finished processing dependencies for %s", requirement)
+
+ def should_unzip(self, dist):
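+ # Precedence sketch: an explicit --zip-ok/--always-unzip choice wins;
+ # otherwise unzip eggs flagged 'not-zip-safe' or lacking any
+ # zip-safety metadata.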
+ if self.zip_ok is not None:
+ return not self.zip_ok
+ if dist.has_metadata('not-zip-safe'):
+ return True
+ if not dist.has_metadata('zip-safe'):
+ return True
+ return False
+
+ def maybe_move(self, spec, dist_filename, setup_base):
+ dst = os.path.join(self.build_directory, spec.key)
+ if os.path.exists(dst):
+ msg = (
+ "%r already exists in %s; build directory %s will not be kept"
+ )
+ log.warn(msg, spec.key, self.build_directory, setup_base)
+ return setup_base
+ if os.path.isdir(dist_filename):
+ setup_base = dist_filename
+ else:
+ if os.path.dirname(dist_filename) == setup_base:
+ os.unlink(dist_filename) # get it out of the tmp dir
+ contents = os.listdir(setup_base)
+ if len(contents) == 1:
+ dist_filename = os.path.join(setup_base, contents[0])
+ if os.path.isdir(dist_filename):
+ # if the only thing there is a directory, move it instead
+ setup_base = dist_filename
+ ensure_directory(dst)
+ shutil.move(setup_base, dst)
+ return dst
+
+ def install_wrapper_scripts(self, dist):
+ if self.exclude_scripts:
+ return
+ for args in ScriptWriter.best().get_args(dist):
+ self.write_script(*args)
+
+ def install_script(self, dist, script_name, script_text, dev_path=None):
+ """Generate a legacy script wrapper and install it"""
+ spec = str(dist.as_requirement())
+ is_script = is_python_script(script_text, script_name)
+
+ if is_script:
+ body = self._load_template(dev_path) % locals()
+ script_text = ScriptWriter.get_header(script_text) + body
+ self.write_script(script_name, _to_ascii(script_text), 'b')
+
+ @staticmethod
+ def _load_template(dev_path):
+ """
+ There are a couple of template scripts in the package. This
+ function loads one of them and prepares it for use.
+ """
+ # See https://github.com/pypa/setuptools/issues/134 for info
+ # on script file naming and downstream issues with SVR4
+ name = 'script.tmpl'
+ if dev_path:
+ name = name.replace('.tmpl', ' (dev).tmpl')
+
+ raw_bytes = resource_string('setuptools', name)
+ return raw_bytes.decode('utf-8')
+
+ def write_script(self, script_name, contents, mode="t", blockers=()):
+ """Write an executable file to the scripts directory"""
+ self.delete_blockers( # clean up old .py/.pyw without a script
+ [os.path.join(self.script_dir, x) for x in blockers]
+ )
+ log.info("Installing %s script to %s", script_name, self.script_dir)
+ target = os.path.join(self.script_dir, script_name)
+ self.add_output(target)
+
+ if self.dry_run:
+ return
+
+ mask = current_umask()
+ ensure_directory(target)
+ if os.path.exists(target):
+ os.unlink(target)
+ with open(target, "w" + mode) as f:
+ f.write(contents)
+ chmod(target, 0o777 - mask)
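+ # e.g. with the common umask 0o022 the script is installed 0o755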
+
+ def install_eggs(self, spec, dist_filename, tmpdir):
+ # .egg dirs or files are already built, so just return them
+ if dist_filename.lower().endswith('.egg'):
+ return [self.install_egg(dist_filename, tmpdir)]
+ elif dist_filename.lower().endswith('.exe'):
+ return [self.install_exe(dist_filename, tmpdir)]
+ elif dist_filename.lower().endswith('.whl'):
+ return [self.install_wheel(dist_filename, tmpdir)]
+
+ # Anything else, try to extract and build
+ setup_base = tmpdir
+ if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+ unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+ elif os.path.isdir(dist_filename):
+ setup_base = os.path.abspath(dist_filename)
+
+ if (setup_base.startswith(tmpdir) # something we downloaded
+ and self.build_directory and spec is not None):
+ setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+ # Find the setup.py file
+ setup_script = os.path.join(setup_base, 'setup.py')
+
+ if not os.path.exists(setup_script):
+ setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+ if not setups:
+ raise DistutilsError(
+ "Couldn't find a setup script in %s" %
+ os.path.abspath(dist_filename)
+ )
+ if len(setups) > 1:
+ raise DistutilsError(
+ "Multiple setup scripts in %s" %
+ os.path.abspath(dist_filename)
+ )
+ setup_script = setups[0]
+
+ # Now run it, and return the result
+ if self.editable:
+ log.info(self.report_editable(spec, setup_script))
+ return []
+ else:
+ return self.build_and_install(setup_script, setup_base)
+
+ def egg_distribution(self, egg_path):
+ if os.path.isdir(egg_path):
+ metadata = PathMetadata(egg_path, os.path.join(egg_path,
+ 'EGG-INFO'))
+ else:
+ metadata = EggMetadata(zipimport.zipimporter(egg_path))
+ return Distribution.from_filename(egg_path, metadata=metadata)
+
+ def install_egg(self, egg_path, tmpdir):
+ destination = os.path.join(
+ self.install_dir,
+ os.path.basename(egg_path),
+ )
+ destination = os.path.abspath(destination)
+ if not self.dry_run:
+ ensure_directory(destination)
+
+ dist = self.egg_distribution(egg_path)
+ if not samefile(egg_path, destination):
+ if os.path.isdir(destination) and not os.path.islink(destination):
+ dir_util.remove_tree(destination, dry_run=self.dry_run)
+ elif os.path.exists(destination):
+ self.execute(
+ os.unlink,
+ (destination,),
+ "Removing " + destination,
+ )
+ try:
+ new_dist_is_zipped = False
+ if os.path.isdir(egg_path):
+ if egg_path.startswith(tmpdir):
+ f, m = shutil.move, "Moving"
+ else:
+ f, m = shutil.copytree, "Copying"
+ elif self.should_unzip(dist):
+ self.mkpath(destination)
+ f, m = self.unpack_and_compile, "Extracting"
+ else:
+ new_dist_is_zipped = True
+ if egg_path.startswith(tmpdir):
+ f, m = shutil.move, "Moving"
+ else:
+ f, m = shutil.copy2, "Copying"
+ self.execute(
+ f,
+ (egg_path, destination),
+ (m + " %s to %s") % (
+ os.path.basename(egg_path),
+ os.path.dirname(destination)
+ ),
+ )
+ update_dist_caches(
+ destination,
+ fix_zipimporter_caches=new_dist_is_zipped,
+ )
+ except Exception:
+ update_dist_caches(destination, fix_zipimporter_caches=False)
+ raise
+
+ self.add_output(destination)
+ return self.egg_distribution(destination)
+
+ def install_exe(self, dist_filename, tmpdir):
+ # See if it's valid, get data
+ cfg = extract_wininst_cfg(dist_filename)
+ if cfg is None:
+ raise DistutilsError(
+ "%s is not a valid distutils Windows .exe" % dist_filename
+ )
+ # Create a dummy distribution object until we build the real distro
+ dist = Distribution(
+ None,
+ project_name=cfg.get('metadata', 'name'),
+ version=cfg.get('metadata', 'version'), platform=get_platform(),
+ )
+
+ # Convert the .exe to an unpacked egg
+ egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
+ dist.location = egg_path
+ egg_tmp = egg_path + '.tmp'
+ _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+ pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
+ ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
+ dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
+ self.exe_to_egg(dist_filename, egg_tmp)
+
+ # Write EGG-INFO/PKG-INFO
+ if not os.path.exists(pkg_inf):
+ f = open(pkg_inf, 'w')
+ f.write('Metadata-Version: 1.0\n')
+ for k, v in cfg.items('metadata'):
+ if k != 'target_version':
+ f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
+ f.close()
+ script_dir = os.path.join(_egg_info, 'scripts')
+ # delete entry-point scripts to avoid duping
+ self.delete_blockers([
+ os.path.join(script_dir, args[0])
+ for args in ScriptWriter.get_args(dist)
+ ])
+ # Build .egg file from tmpdir
+ bdist_egg.make_zipfile(
+ egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run,
+ )
+ # install the .egg
+ return self.install_egg(egg_path, tmpdir)
+
+ def exe_to_egg(self, dist_filename, egg_tmp):
+ """Extract a bdist_wininst to the directories an egg would use"""
+ # Check for .pth file and set up prefix translations
+ prefixes = get_exe_prefixes(dist_filename)
+ to_compile = []
+ native_libs = []
+ top_level = {}
+
+ def process(src, dst):
+ s = src.lower()
+ for old, new in prefixes:
+ if s.startswith(old):
+ src = new + src[len(old):]
+ parts = src.split('/')
+ dst = os.path.join(egg_tmp, *parts)
+ dl = dst.lower()
+ if dl.endswith('.pyd') or dl.endswith('.dll'):
+ parts[-1] = bdist_egg.strip_module(parts[-1])
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ native_libs.append(src)
+ elif dl.endswith('.py') and old != 'SCRIPTS/':
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ to_compile.append(dst)
+ return dst
+ if not src.endswith('.pth'):
+ log.warn("WARNING: can't process %s", src)
+ return None
+
+ # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+ unpack_archive(dist_filename, egg_tmp, process)
+ stubs = []
+ for res in native_libs:
+ if res.lower().endswith('.pyd'): # create stubs for .pyd's
+ parts = res.split('/')
+ resource = parts[-1]
+ parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
+ pyfile = os.path.join(egg_tmp, *parts)
+ to_compile.append(pyfile)
+ stubs.append(pyfile)
+ bdist_egg.write_stub(resource, pyfile)
+ self.byte_compile(to_compile) # compile .py's
+ bdist_egg.write_safety_flag(
+ os.path.join(egg_tmp, 'EGG-INFO'),
+ bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
+
+ for name in 'top_level', 'native_libs':
+ if locals()[name]:
+ txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
+ if not os.path.exists(txt):
+ f = open(txt, 'w')
+ f.write('\n'.join(locals()[name]) + '\n')
+ f.close()
+
+ def install_wheel(self, wheel_path, tmpdir):
+ wheel = Wheel(wheel_path)
+ assert wheel.is_compatible()
+ destination = os.path.join(self.install_dir, wheel.egg_name())
+ destination = os.path.abspath(destination)
+ if not self.dry_run:
+ ensure_directory(destination)
+ if os.path.isdir(destination) and not os.path.islink(destination):
+ dir_util.remove_tree(destination, dry_run=self.dry_run)
+ elif os.path.exists(destination):
+ self.execute(
+ os.unlink,
+ (destination,),
+ "Removing " + destination,
+ )
+ try:
+ self.execute(
+ wheel.install_as_egg,
+ (destination,),
+ ("Installing %s to %s") % (
+ os.path.basename(wheel_path),
+ os.path.dirname(destination)
+ ),
+ )
+ finally:
+ update_dist_caches(destination, fix_zipimporter_caches=False)
+ self.add_output(destination)
+ return self.egg_distribution(destination)
+
+ __mv_warning = textwrap.dedent("""
+ Because this distribution was installed --multi-version, before you can
+ import modules from this package in an application, you will need to
+ 'import pkg_resources' and then use a 'require()' call similar to one of
+ these examples, in order to select the desired version:
+
+ pkg_resources.require("%(name)s") # latest installed version
+ pkg_resources.require("%(name)s==%(version)s") # this exact version
+ pkg_resources.require("%(name)s>=%(version)s") # this version or higher
+ """).lstrip()
+
+ __id_warning = textwrap.dedent("""
+ Note also that the installation directory must be on sys.path at runtime for
+ this to work. (e.g. by being the application's script directory, by being on
+ PYTHONPATH, or by being added to sys.path by your code.)
+ """)
+
+ def installation_report(self, req, dist, what="Installed"):
+ """Helpful installation message for display to package users"""
+ msg = "\n%(what)s %(eggloc)s%(extras)s"
+ if self.multi_version and not self.no_report:
+ msg += '\n' + self.__mv_warning
+ if self.install_dir not in map(normalize_path, sys.path):
+ msg += '\n' + self.__id_warning
+
+ eggloc = dist.location
+ name = dist.project_name
+ version = dist.version
+ extras = '' # TODO: self.report_extras(req, dist)
+ return msg % locals()
+
+ __editable_msg = textwrap.dedent("""
+ Extracted editable version of %(spec)s to %(dirname)s
+
+ If it uses setuptools in its setup script, you can activate it in
+ "development" mode by going to that directory and running::
+
+ %(python)s setup.py develop
+
+ See the setuptools documentation for the "develop" command for more info.
+ """).lstrip()
+
+ def report_editable(self, spec, setup_script):
+ dirname = os.path.dirname(setup_script)
+ python = sys.executable
+ return '\n' + self.__editable_msg % locals()
+
+ def run_setup(self, setup_script, setup_base, args):
+ sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+ sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+ args = list(args)
+ if self.verbose > 2:
+ v = 'v' * (self.verbose - 1)
+ args.insert(0, '-' + v)
+ elif self.verbose < 2:
+ args.insert(0, '-q')
+ if self.dry_run:
+ args.insert(0, '-n')
+ log.info(
+ "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
+ )
+ try:
+ run_setup(setup_script, args)
+ except SystemExit as v:
+ raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+
+ def build_and_install(self, setup_script, setup_base):
+ args = ['bdist_egg', '--dist-dir']
+
+ dist_dir = tempfile.mkdtemp(
+ prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+ )
+ try:
+ self._set_fetcher_options(os.path.dirname(setup_script))
+ args.append(dist_dir)
+
+ self.run_setup(setup_script, setup_base, args)
+ all_eggs = Environment([dist_dir])
+ eggs = []
+ for key in all_eggs:
+ for dist in all_eggs[key]:
+ eggs.append(self.install_egg(dist.location, setup_base))
+ if not eggs and not self.dry_run:
+ log.warn("No eggs found in %s (setup script problem?)",
+ dist_dir)
+ return eggs
+ finally:
+ rmtree(dist_dir)
+ log.set_verbosity(self.verbose) # restore our log verbosity
+
+ def _set_fetcher_options(self, base):
+ """
+ When easy_install is about to run bdist_egg on a source dist, that
+ source dist might have 'setup_requires' directives, requiring
+ additional fetching. Ensure the fetcher options given to easy_install
+ are available to that command as well.
+ """
+ # find the fetch options from easy_install and write them out
+ # to the setup.cfg file.
+ ei_opts = self.distribution.get_option_dict('easy_install').copy()
+ fetch_directives = (
+ 'find_links', 'site_dirs', 'index_url', 'optimize',
+ 'allow_hosts',
+ )
+ fetch_options = {}
+ for key, val in ei_opts.items():
+ if key not in fetch_directives:
+ continue
+ fetch_options[key.replace('_', '-')] = val[1]
+ # create a settings dictionary suitable for `edit_config`
+ settings = dict(easy_install=fetch_options)
+ cfg_filename = os.path.join(base, 'setup.cfg')
+ setopt.edit_config(cfg_filename, settings)
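+ # e.g. 'easy_install -i INDEX -f LINKS pkg' leaves the source tree
+ # with a setup.cfg along these lines (illustrative):
+ # [easy_install]
+ # index-url = INDEX
+ # find-links = LINKS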
+
+ def update_pth(self, dist):
+ if self.pth_file is None:
+ return
+
+ for d in self.pth_file[dist.key]: # drop old entries
+ if self.multi_version or d.location != dist.location:
+ log.info("Removing %s from easy-install.pth file", d)
+ self.pth_file.remove(d)
+ if d.location in self.shadow_path:
+ self.shadow_path.remove(d.location)
+
+ if not self.multi_version:
+ if dist.location in self.pth_file.paths:
+ log.info(
+ "%s is already the active version in easy-install.pth",
+ dist,
+ )
+ else:
+ log.info("Adding %s to easy-install.pth file", dist)
+ self.pth_file.add(dist) # add new entry
+ if dist.location not in self.shadow_path:
+ self.shadow_path.append(dist.location)
+
+ if not self.dry_run:
+
+ self.pth_file.save()
+
+ if dist.key == 'setuptools':
+ # Ensure that setuptools itself never becomes unavailable!
+ # XXX should this check for latest version?
+ filename = os.path.join(self.install_dir, 'setuptools.pth')
+ if os.path.islink(filename):
+ os.unlink(filename)
+ f = open(filename, 'wt')
+ f.write(self.pth_file.make_relative(dist.location) + '\n')
+ f.close()
+
+ def unpack_progress(self, src, dst):
+ # Progress filter for unpacking
+ log.debug("Unpacking %s to %s", src, dst)
+ return dst # only unpack-and-compile skips files for dry run
+
+ def unpack_and_compile(self, egg_path, destination):
+ to_compile = []
+ to_chmod = []
+
+ def pf(src, dst):
+ if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+ to_compile.append(dst)
+ elif dst.endswith('.dll') or dst.endswith('.so'):
+ to_chmod.append(dst)
+ self.unpack_progress(src, dst)
+ return dst if not self.dry_run else None
+
+ unpack_archive(egg_path, destination, pf)
+ self.byte_compile(to_compile)
+ if not self.dry_run:
+ for f in to_chmod:
+ mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
+ chmod(f, mode)
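+ # e.g. a .so extracted as 0o600 ends up 0o755: OR-ing 0o555 adds
+ # r-x for everyone, and the 0o7755 mask strips group/other write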
+
+ def byte_compile(self, to_compile):
+ if sys.dont_write_bytecode:
+ return
+
+ from distutils.util import byte_compile
+
+ try:
+ # try to make the byte compile messages quieter
+ log.set_verbosity(self.verbose - 1)
+
+ byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+ if self.optimize:
+ byte_compile(
+ to_compile, optimize=self.optimize, force=1,
+ dry_run=self.dry_run,
+ )
+ finally:
+ log.set_verbosity(self.verbose) # restore original verbosity
+
+ __no_default_msg = textwrap.dedent("""
+ bad install directory or PYTHONPATH
+
+ You are attempting to install a package to a directory that is not
+ on PYTHONPATH and which Python does not read ".pth" files from. The
+ installation directory you specified (via --install-dir, --prefix, or
+ the distutils default setting) was:
+
+ %s
+
+ and your PYTHONPATH environment variable currently contains:
+
+ %r
+
+ Here are some of your options for correcting the problem:
+
+ * You can choose a different installation directory, i.e., one that is
+ on PYTHONPATH or supports .pth files
+
+ * You can add the installation directory to the PYTHONPATH environment
+ variable. (It must then also be on PYTHONPATH whenever you run
+ Python and want to use the package(s) you are installing.)
+
+ * You can set up the installation directory to support ".pth" files by
+ using one of the approaches described here:
+
+ https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations
+
+
+ Please make the appropriate changes for your system and try again.""").lstrip()
+
+ def no_default_version_msg(self):
+ template = self.__no_default_msg
+ return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
+
+ def install_site_py(self):
+ """Make sure there's a site.py in the target dir, if needed"""
+
+ if self.sitepy_installed:
+ return # already did it, or don't need to
+
+ sitepy = os.path.join(self.install_dir, "site.py")
+ source = resource_string("setuptools", "site-patch.py")
+ source = source.decode('utf-8')
+ current = ""
+
+ if os.path.exists(sitepy):
+ log.debug("Checking existing site.py in %s", self.install_dir)
+ with io.open(sitepy) as strm:
+ current = strm.read()
+
+ if not current.startswith('def __boot():'):
+ raise DistutilsError(
+ "%s is not a setuptools-generated site.py; please"
+ " remove it." % sitepy
+ )
+
+ if current != source:
+ log.info("Creating %s", sitepy)
+ if not self.dry_run:
+ ensure_directory(sitepy)
+ with io.open(sitepy, 'w', encoding='utf-8') as strm:
+ strm.write(source)
+ self.byte_compile([sitepy])
+
+ self.sitepy_installed = True
+
+ def create_home_path(self):
+ """Create directories under ~."""
+ if not self.user:
+ return
+ home = convert_path(os.path.expanduser("~"))
+ for name, path in six.iteritems(self.config_vars):
+ if path.startswith(home) and not os.path.isdir(path):
+ self.debug_print("os.makedirs('%s', 0o700)" % path)
+ os.makedirs(path, 0o700)
+
+ INSTALL_SCHEMES = dict(
+ posix=dict(
+ install_dir='$base/lib/python$py_version_short/site-packages',
+ script_dir='$base/bin',
+ ),
+ )
+
+ DEFAULT_SCHEME = dict(
+ install_dir='$base/Lib/site-packages',
+ script_dir='$base/Scripts',
+ )
+
+ def _expand(self, *attrs):
+ config_vars = self.get_finalized_command('install').config_vars
+
+ if self.prefix:
+ # Set default install_dir/scripts from --prefix
+ config_vars = config_vars.copy()
+ config_vars['base'] = self.prefix
+ scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
+ for attr, val in scheme.items():
+ if getattr(self, attr, None) is None:
+ setattr(self, attr, val)
+
+ from distutils.util import subst_vars
+
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ val = subst_vars(val, config_vars)
+ if os.name == 'posix':
+ val = os.path.expanduser(val)
+ setattr(self, attr, val)
+
+
+def _pythonpath():
+ items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
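+ # e.g. PYTHONPATH='/a::/b' yields ['/a', '/b']; empty entries dropped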
+ return filter(None, items)
+
+
+def get_site_dirs():
+ """
+ Return a list of 'site' dirs
+ """
+
+ sitedirs = []
+
+ # start with PYTHONPATH
+ sitedirs.extend(_pythonpath())
+
+ prefixes = [sys.prefix]
+ if sys.exec_prefix != sys.prefix:
+ prefixes.append(sys.exec_prefix)
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos'):
+ sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+ elif os.sep == '/':
+ sitedirs.extend([
+ os.path.join(
+ prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages",
+ ),
+ os.path.join(prefix, "lib", "site-python"),
+ ])
+ else:
+ sitedirs.extend([
+ prefix,
+ os.path.join(prefix, "lib", "site-packages"),
+ ])
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ home_sp = os.path.join(
+ home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages',
+ )
+ sitedirs.append(home_sp)
+ lib_paths = get_path('purelib'), get_path('platlib')
+ for site_lib in lib_paths:
+ if site_lib not in sitedirs:
+ sitedirs.append(site_lib)
+
+ if site.ENABLE_USER_SITE:
+ sitedirs.append(site.USER_SITE)
+
+ try:
+ sitedirs.extend(site.getsitepackages())
+ except AttributeError:
+ pass
+
+ sitedirs = list(map(normalize_path, sitedirs))
+
+ return sitedirs
+
+
+def expand_paths(inputs):
+ """Yield sys.path directories that might contain "old-style" packages"""
+
+ seen = {}
+
+ for dirname in inputs:
+ dirname = normalize_path(dirname)
+ if dirname in seen:
+ continue
+
+ seen[dirname] = 1
+ if not os.path.isdir(dirname):
+ continue
+
+ files = os.listdir(dirname)
+ yield dirname, files
+
+ for name in files:
+ if not name.endswith('.pth'):
+ # We only care about the .pth files
+ continue
+ if name in ('easy-install.pth', 'setuptools.pth'):
+ # Ignore .pth files that we control
+ continue
+
+ # Read the .pth file
+ f = open(os.path.join(dirname, name))
+ lines = list(yield_lines(f))
+ f.close()
+
+ # Yield existing non-dupe, non-import directory lines from it
+ for line in lines:
+ if not line.startswith("import"):
+ line = normalize_path(line.rstrip())
+ if line not in seen:
+ seen[line] = 1
+ if not os.path.isdir(line):
+ continue
+ yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+ """Extract configuration data from a bdist_wininst .exe
+
+ Returns a configparser.RawConfigParser, or None
+ """
+ f = open(dist_filename, 'rb')
+ try:
+ endrec = zipfile._EndRecData(f)
+ if endrec is None:
+ return None
+
+ prepended = (endrec[9] - endrec[5]) - endrec[6]
+ if prepended < 12: # no wininst data here
+ return None
+ f.seek(prepended - 12)
+
+ tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
+ if tag not in (0x1234567A, 0x1234567B):
+ return None # not a valid tag
+
+ f.seek(prepended - (12 + cfglen))
+ init = {'version': '', 'target_version': ''}
+ cfg = configparser.RawConfigParser(init)
+ try:
+ part = f.read(cfglen)
+ # Read up to the first null byte.
+ config = part.split(b'\0', 1)[0]
+ # Now the config is in bytes, but for RawConfigParser, it should
+ # be text, so decode it.
+ config = config.decode(sys.getfilesystemencoding())
+ cfg.readfp(six.StringIO(config))
+ except configparser.Error:
+ return None
+ if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+ return None
+ return cfg
+
+ finally:
+ f.close()
+
+
+def get_exe_prefixes(exe_filename):
+ """Get exe->egg path translations for a given .exe file"""
+
+ prefixes = [
+ ('PURELIB/', ''),
+ ('PLATLIB/pywin32_system32', ''),
+ ('PLATLIB/', ''),
+ ('SCRIPTS/', 'EGG-INFO/scripts/'),
+ ('DATA/lib/site-packages', ''),
+ ]
+ z = zipfile.ZipFile(exe_filename)
+ try:
+ for info in z.infolist():
+ name = info.filename
+ parts = name.split('/')
+ if len(parts) == 3 and parts[2] == 'PKG-INFO':
+ if parts[1].endswith('.egg-info'):
+ prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
+ break
+ if len(parts) != 2 or not name.endswith('.pth'):
+ continue
+ if name.endswith('-nspkg.pth'):
+ continue
+ if parts[0].upper() in ('PURELIB', 'PLATLIB'):
+ contents = z.read(name)
+ if six.PY3:
+ contents = contents.decode()
+ for pth in yield_lines(contents):
+ pth = pth.strip().replace('\\', '/')
+ if not pth.startswith('import'):
+ prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
+ finally:
+ z.close()
+ prefixes = [(x.lower(), y) for x, y in prefixes]
+ prefixes.sort()
+ prefixes.reverse()
+ return prefixes
+
+
+class PthDistributions(Environment):
+ """A .pth file with Distribution paths in it"""
+
+ dirty = False
+
+ def __init__(self, filename, sitedirs=()):
+ self.filename = filename
+ self.sitedirs = list(map(normalize_path, sitedirs))
+ self.basedir = normalize_path(os.path.dirname(self.filename))
+ self._load()
+ Environment.__init__(self, [], None, None)
+ for path in yield_lines(self.paths):
+ list(map(self.add, find_distributions(path, True)))
+
+ def _load(self):
+ self.paths = []
+ saw_import = False
+ seen = dict.fromkeys(self.sitedirs)
+ if os.path.isfile(self.filename):
+ f = open(self.filename, 'rt')
+ for line in f:
+ if line.startswith('import'):
+ saw_import = True
+ continue
+ path = line.rstrip()
+ self.paths.append(path)
+ if not path.strip() or path.strip().startswith('#'):
+ continue
+ # skip non-existent paths, in case somebody deleted a package
+ # manually, and duplicate paths as well
+ path = self.paths[-1] = normalize_path(
+ os.path.join(self.basedir, path)
+ )
+ if not os.path.exists(path) or path in seen:
+ self.paths.pop() # skip it
+ self.dirty = True # we cleaned up, so we're dirty now :)
+ continue
+ seen[path] = 1
+ f.close()
+
+ if self.paths and not saw_import:
+ self.dirty = True # ensure anything we touch has import wrappers
+ while self.paths and not self.paths[-1].strip():
+ self.paths.pop()
+
+ def save(self):
+ """Write changed .pth file back to disk"""
+ if not self.dirty:
+ return
+
+ rel_paths = list(map(self.make_relative, self.paths))
+ if rel_paths:
+ log.debug("Saving %s", self.filename)
+ lines = self._wrap_lines(rel_paths)
+ data = '\n'.join(lines) + '\n'
+
+ if os.path.islink(self.filename):
+ os.unlink(self.filename)
+ with open(self.filename, 'wt') as f:
+ f.write(data)
+
+ elif os.path.exists(self.filename):
+ log.debug("Deleting empty %s", self.filename)
+ os.unlink(self.filename)
+
+ self.dirty = False
+
+ @staticmethod
+ def _wrap_lines(lines):
+ return lines
+
+ def add(self, dist):
+ """Add `dist` to the distribution map"""
+ new_path = (
+ dist.location not in self.paths and (
+ dist.location not in self.sitedirs or
+ # account for '.' being in PYTHONPATH
+ dist.location == os.getcwd()
+ )
+ )
+ if new_path:
+ self.paths.append(dist.location)
+ self.dirty = True
+ Environment.add(self, dist)
+
+ def remove(self, dist):
+ """Remove `dist` from the distribution map"""
+ while dist.location in self.paths:
+ self.paths.remove(dist.location)
+ self.dirty = True
+ Environment.remove(self, dist)
+
+ def make_relative(self, path):
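+ # e.g. with basedir '/site', '/site/pkg/foo.egg' -> './pkg/foo.egg'
+ # (POSIX separators assumed); paths outside basedir return unchanged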
+ npath, last = os.path.split(normalize_path(path))
+ baselen = len(self.basedir)
+ parts = [last]
+ sep = '/' if os.altsep == '/' else os.sep
+ while len(npath) >= baselen:
+ if npath == self.basedir:
+ parts.append(os.curdir)
+ parts.reverse()
+ return sep.join(parts)
+ npath, last = os.path.split(npath)
+ parts.append(last)
+ else:
+ return path
+
+
+class RewritePthDistributions(PthDistributions):
+ @classmethod
+ def _wrap_lines(cls, lines):
+ yield cls.prelude
+ for line in lines:
+ yield line
+ yield cls.postlude
+
+ prelude = _one_liner("""
+ import sys
+ sys.__plen = len(sys.path)
+ """)
+ postlude = _one_liner("""
+ import sys
+ new = sys.path[sys.__plen:]
+ del sys.path[sys.__plen:]
+ p = getattr(sys, '__egginsert', 0)
+ sys.path[p:p] = new
+ sys.__egginsert = p + len(new)
+ """)
+
+
+if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
+ PthDistributions = RewritePthDistributions
+
+
+def _first_line_re():
+ """
+ Return a regular expression based on first_line_re suitable for matching
+ strings.
+ """
+ if isinstance(first_line_re.pattern, str):
+ return first_line_re
+
+ # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+ return re.compile(first_line_re.pattern.decode())
+
+
+def auto_chmod(func, arg, exc):
+ if func in [os.unlink, os.remove] and os.name == 'nt':
+ chmod(arg, stat.S_IWRITE)
+ return func(arg)
+ et, ev, tb = sys.exc_info()
+ # re-raise the original exception with the offending call appended
+ six.reraise(et, et("%s: %s %s" % (ev, func, arg)), tb)
+
+
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+ """
+ Fix any globally cached `dist_path` related data
+
+ `dist_path` should be a path of a newly installed egg distribution (zipped
+ or unzipped).
+
+ sys.path_importer_cache contains finder objects that have been cached when
+ importing data from the original distribution. Any such finders need to be
+ cleared since the replacement distribution might be packaged differently,
+ e.g. a zipped egg distribution might get replaced with an unzipped egg
+ folder or vice versa. Having the old finders cached may then cause Python
+ to attempt loading modules from the replacement distribution using an
+ incorrect loader.
+
+ zipimport.zipimporter objects are Python loaders charged with importing
+ data packaged inside zip archives. If stale loaders referencing the
+ original distribution are left behind, they can fail to load modules from
+ the replacement distribution. E.g. if an old zipimport.zipimporter instance
+ is used to load data from a new zipped egg archive, it may cause the
+ operation to attempt to locate the requested data in the wrong location -
+ one indicated by the original distribution's zip archive directory
+ information. Such an operation may then fail outright, e.g. report having
+ read a 'bad local file header', or even worse, it may fail silently &
+ return invalid data.
+
+ zipimport._zip_directory_cache contains cached zip archive directory
+ information for all existing zipimport.zipimporter instances and all such
+ instances connected to the same archive share the same cached directory
+ information.
+
+ If asked, and the underlying Python implementation allows it, we can fix
+ all existing zipimport.zipimporter instances instead of having to track
+ them down and remove them one by one, by updating their shared cached zip
+ archive directory information. This, of course, assumes that the
+ replacement distribution is packaged as a zipped egg.
+
+ If not asked to fix existing zipimport.zipimporter instances, we still do
+ our best to clear any remaining zipimport.zipimporter related cached data
+ that might somehow later get used when attempting to load data from the new
+ distribution and thus cause such load operations to fail. Note that when
+ tracking down such remaining stale data, we cannot catch every conceivable
+ usage from here, and we clear only those that we know of and have found to
+ cause problems if left alive. Any remaining caches should be updated by
+ whoever is in charge of maintaining them, i.e. they should be ready to
+ handle us replacing their zip archives with new distributions at runtime.
+
+ """
+ # There are several other known sources of stale zipimport.zipimporter
+ # instances that we do not clear here, but might if ever given a reason to
+ # do so:
+ # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+ # set') may contain distributions which may in turn contain their
+ # zipimport.zipimporter loaders.
+ # * Several zipimport.zipimporter loaders held by local variables further
+ # up the function call stack when running the setuptools installation.
+ # * Already loaded modules may have their __loader__ attribute set to the
+ # exact loader instance used when importing them. Python 3.4 docs state
+ # that this information is intended mostly for introspection and so is
+ # not expected to cause us problems.
+ normalized_path = normalize_path(dist_path)
+ _uncache(normalized_path, sys.path_importer_cache)
+ if fix_zipimporter_caches:
+ _replace_zip_directory_cache_data(normalized_path)
+ else:
+ # Here, even though we do not want to fix existing and now stale
+ # zipimporter cache information, we still want to remove it. Related to
+ # Python's zip archive directory information cache, we clear each of
+ # its stale entries in two phases:
+ # 1. Clear the entry so attempting to access zip archive information
+ # via any existing stale zipimport.zipimporter instances fails.
+ # 2. Remove the entry from the cache so any newly constructed
+ # zipimport.zipimporter instances do not end up using old stale
+ # zip archive directory information.
+ # This whole stale data removal step does not seem strictly necessary,
+ # but has been left in because it was done before we started replacing
+ # the zip archive directory information cache content if possible, and
+ # there are no relevant unit tests that we can depend on to tell us if
+ # this is really needed.
+ _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+ """
+ Return zipimporter cache entry keys related to a given normalized path.
+
+ Alternative path spellings (e.g. those using different character case or
+ those using alternative path separators) related to the same path are
+ included. Any sub-path entries are included as well, i.e. those
+ corresponding to zip archives embedded in other zip archives.
+
+ """
+ result = []
+ prefix_len = len(normalized_path)
+ for p in cache:
+ np = normalize_path(p)
+ if (np.startswith(normalized_path) and
+ np[prefix_len:prefix_len + 1] in (os.sep, '')):
+ result.append(p)
+ return result
+
+
+def _update_zipimporter_cache(normalized_path, cache, updater=None):
+ """
+ Update zipimporter cache data for a given normalized path.
+
+ Any sub-path entries are processed as well, i.e. those corresponding to zip
+ archives embedded in other zip archives.
+
+ Given updater is a callable taking a cache entry key and the original entry
+ (after already removing the entry from the cache), and expected to update
+ the entry and possibly return a new one to be inserted in its place.
+ Returning None indicates that the entry should not be replaced with a new
+ one. If no updater is given, the cache entries are simply removed without
+ any additional processing, the same as if the updater simply returned None.
+
+ """
+ for p in _collect_zipimporter_cache_entries(normalized_path, cache):
+ # N.B. pypy's custom zipimport._zip_directory_cache implementation does
+ # not support the complete dict interface:
+ # * Does not support item assignment, thus allowing this function to be
+ # used only for removing existing cache entries.
+ # * Does not support the dict.pop() method, forcing us to use the
+ # get/del patterns instead. For more detailed information see the
+ # following links:
+ # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
+ # http://bit.ly/2h9itJX
+ old_entry = cache[p]
+ del cache[p]
+ new_entry = updater and updater(p, old_entry)
+ if new_entry is not None:
+ cache[p] = new_entry
+
+
+def _uncache(normalized_path, cache):
+ _update_zipimporter_cache(normalized_path, cache)
+
+
+def _remove_and_clear_zip_directory_cache_data(normalized_path):
+ def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
+ old_entry.clear()
+
+ _update_zipimporter_cache(
+ normalized_path, zipimport._zip_directory_cache,
+ updater=clear_and_remove_cached_zip_archive_directory_data)
+
+
+# PyPy Python implementation does not allow directly writing to the
+# zipimport._zip_directory_cache and so prevents us from attempting to correct
+# its content. The best we can do there is clear the problematic cache content
+# and have PyPy repopulate it as needed. The downside is that if there are any
+ # stale zipimport.zipimporter instances lying around, attempting to use them
+ # will fail due to not having their zip archive directory information available
+# instead of being automatically corrected to use the new correct zip archive
+# directory information.
+if '__pypy__' in sys.builtin_module_names:
+ _replace_zip_directory_cache_data = \
+ _remove_and_clear_zip_directory_cache_data
+else:
+
+ def _replace_zip_directory_cache_data(normalized_path):
+ def replace_cached_zip_archive_directory_data(path, old_entry):
+ # N.B. In theory, we could load the zip directory information just
+ # once for all updated path spellings, and then copy it locally and
+ # update its contained path strings to contain the correct
+ # spelling, but that seems like a way too invasive move (this cache
+ # structure is not officially documented anywhere and could in
+ # theory change with new Python releases) for no significant
+ # benefit.
+ old_entry.clear()
+ zipimport.zipimporter(path)
+ old_entry.update(zipimport._zip_directory_cache[path])
+ return old_entry
+
+ _update_zipimporter_cache(
+ normalized_path, zipimport._zip_directory_cache,
+ updater=replace_cached_zip_archive_directory_data)
+
+
+def is_python(text, filename='<string>'):
+ "Is this string a valid Python script?"
+ try:
+ compile(text, filename, 'exec')
+ except (SyntaxError, TypeError):
+ return False
+ else:
+ return True
+
+
+def is_sh(executable):
+ """Determine if the specified executable is a .sh (contains a #! line)"""
+ try:
+ with io.open(executable, encoding='latin-1') as fp:
+ magic = fp.read(2)
+ except (OSError, IOError):
+ return executable
+ return magic == '#!'
+
+
+def nt_quote_arg(arg):
+ """Quote a command line argument according to Windows parsing rules"""
+ return subprocess.list2cmdline([arg])
+
+
+def is_python_script(script_text, filename):
+ """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
+ """
+ if filename.endswith('.py') or filename.endswith('.pyw'):
+ return True # extension says it's Python
+ if is_python(script_text, filename):
+ return True # it's syntactically valid Python
+ if script_text.startswith('#!'):
+ # It begins with a '#!' line, so check if 'python' is in it somewhere
+ return 'python' in script_text.splitlines()[0].lower()
+
+ return False # Not any Python I can recognize
+
+
+try:
+ from os import chmod as _chmod
+except ImportError:
+ # Jython compatibility
+ def _chmod(*args):
+ pass
+
+
+def chmod(path, mode):
+ log.debug("changing mode of %s to %o", path, mode)
+ try:
+ _chmod(path, mode)
+ except os.error as e:
+ log.debug("chmod failed: %s", e)
+
+
+class CommandSpec(list):
+ """
+ A command spec for a #! header, specified as a list of arguments akin to
+ those passed to Popen.
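+
+ Illustrative sketch (the interpreter path is a stand-in, not a
+ guaranteed value):
+
+ CommandSpec(['/usr/bin/python']).as_header() # -> '#!/usr/bin/python\n'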
+ """
+
+ options = []
+ split_args = dict()
+
+ @classmethod
+ def best(cls):
+ """
+ Choose the best CommandSpec class based on environmental conditions.
+ """
+ return cls
+
+ @classmethod
+ def _sys_executable(cls):
+ _default = os.path.normpath(sys.executable)
+ return os.environ.get('__PYVENV_LAUNCHER__', _default)
+
+ @classmethod
+ def from_param(cls, param):
+ """
+ Construct a CommandSpec from a parameter to build_scripts, which may
+ be None.
+ """
+ if isinstance(param, cls):
+ return param
+ if isinstance(param, list):
+ return cls(param)
+ if param is None:
+ return cls.from_environment()
+ # otherwise, assume it's a string.
+ return cls.from_string(param)
+
+ @classmethod
+ def from_environment(cls):
+ return cls([cls._sys_executable()])
+
+ @classmethod
+ def from_string(cls, string):
+ """
+ Construct a command spec from a simple string representing a command
+ line parseable by shlex.split.
+ """
+ items = shlex.split(string, **cls.split_args)
+ return cls(items)
+
+ def install_options(self, script_text):
+ self.options = shlex.split(self._extract_options(script_text))
+ cmdline = subprocess.list2cmdline(self)
+ if not isascii(cmdline):
+ self.options[:0] = ['-x']
+
+ @staticmethod
+ def _extract_options(orig_script):
+ """
+ Extract any options from the first line of the script.
+ """
+ first = (orig_script + '\n').splitlines()[0]
+ match = _first_line_re().match(first)
+ options = match.group(1) or '' if match else ''
+ return options.strip()
+
+ def as_header(self):
+ return self._render(self + list(self.options))
+
+ @staticmethod
+ def _strip_quotes(item):
+ _QUOTES = '"\''
+ for q in _QUOTES:
+ if item.startswith(q) and item.endswith(q):
+ return item[1:-1]
+ return item
+
+ @staticmethod
+ def _render(items):
+ cmdline = subprocess.list2cmdline(
+ CommandSpec._strip_quotes(item.strip()) for item in items)
+ return '#!' + cmdline + '\n'
+
+
+# For pbr compat; will be removed in a future version.
+sys_executable = CommandSpec._sys_executable()
+
+
+class WindowsCommandSpec(CommandSpec):
+ split_args = dict(posix=False)
+
+
+class ScriptWriter(object):
+ """
+ Encapsulates behavior around writing entry point scripts for console and
+ gui apps.
+ """
+
+ template = textwrap.dedent(r"""
+ # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+ __requires__ = %(spec)r
+ import re
+ import sys
+ from pkg_resources import load_entry_point
+
+ if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(
+ load_entry_point(%(spec)r, %(group)r, %(name)r)()
+ )
+ """).lstrip()
+
+ command_spec_class = CommandSpec
+
+ @classmethod
+ def get_script_args(cls, dist, executable=None, wininst=False):
+ # for backward compatibility
+ warnings.warn("Use get_args", DeprecationWarning)
+ writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
+ header = cls.get_script_header("", executable, wininst)
+ return writer.get_args(dist, header)
+
+ @classmethod
+ def get_script_header(cls, script_text, executable=None, wininst=False):
+ # for backward compatibility
+ warnings.warn("Use get_header", DeprecationWarning)
+ if wininst:
+ executable = "python.exe"
+ cmd = cls.command_spec_class.best().from_param(executable)
+ cmd.install_options(script_text)
+ return cmd.as_header()
+
+ @classmethod
+ def get_args(cls, dist, header=None):
+ """
+ Yield write_script() argument tuples for a distribution's
+ console_scripts and gui_scripts entry points.
+ """
+ if header is None:
+ header = cls.get_header()
+ spec = str(dist.as_requirement())
+ for type_ in 'console', 'gui':
+ group = type_ + '_scripts'
+ for name, ep in dist.get_entry_map(group).items():
+ cls._ensure_safe_name(name)
+ script_text = cls.template % locals()
+ args = cls._get_script_args(type_, name, header, script_text)
+ for res in args:
+ yield res
+
+ @staticmethod
+ def _ensure_safe_name(name):
+ """
+ Prevent paths in *_scripts entry point names.
+ """
+ has_path_sep = re.search(r'[\\/]', name)
+ if has_path_sep:
+ raise ValueError("Path separators not allowed in script names")
+
+ @classmethod
+ def get_writer(cls, force_windows):
+ # for backward compatibility
+ warnings.warn("Use best", DeprecationWarning)
+ return WindowsScriptWriter.best() if force_windows else cls.best()
+
+ @classmethod
+ def best(cls):
+ """
+ Select the best ScriptWriter for this environment.
+ """
+ if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
+ return WindowsScriptWriter.best()
+ else:
+ return cls
+
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ # Simply write the stub with no extension.
+ yield (name, header + script_text)
+
+ @classmethod
+ def get_header(cls, script_text="", executable=None):
+ """Create a #! line, getting options (if any) from script_text"""
+ cmd = cls.command_spec_class.best().from_param(executable)
+ cmd.install_options(script_text)
+ return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+ command_spec_class = WindowsCommandSpec
+
+ @classmethod
+ def get_writer(cls):
+ # for backward compatibility
+ warnings.warn("Use best", DeprecationWarning)
+ return cls.best()
+
+ @classmethod
+ def best(cls):
+ """
+ Select the best ScriptWriter suitable for Windows
+ """
+ writer_lookup = dict(
+ executable=WindowsExecutableLauncherWriter,
+ natural=cls,
+ )
+ # for compatibility, use the executable launcher by default
+ launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+ return writer_lookup[launcher]
+
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ "For Windows, add a .py extension"
+ ext = dict(console='.pya', gui='.pyw')[type_]
+ if ext not in os.environ['PATHEXT'].lower().split(';'):
+ msg = (
+ "{ext} not listed in PATHEXT; scripts will not be "
+ "recognized as executables."
+ ).format(**locals())
+ warnings.warn(msg, UserWarning)
+ old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+ old.remove(ext)
+ header = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield name + ext, header + script_text, 't', blockers
+
+ @classmethod
+ def _adjust_header(cls, type_, orig_header):
+ """
+ Make sure 'pythonw' is used for gui and 'python' is used for
+ console (regardless of what sys.executable is).
+ """
+ pattern = 'pythonw.exe'
+ repl = 'python.exe'
+ if type_ == 'gui':
+ pattern, repl = repl, pattern
+ pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+ new_header = pattern_ob.sub(string=orig_header, repl=repl)
+ return new_header if cls._use_header(new_header) else orig_header
+
+ @staticmethod
+ def _use_header(new_header):
+ """
+ Should _adjust_header use the replaced header?
+
+ On non-Windows systems, always use the replaced header. On
+ Windows systems, only use the replaced header if it resolves
+ to an executable on the system.
+ """
+ clean_header = new_header[2:-1].strip('"')
+ return sys.platform != 'win32' or find_executable(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ """
+ For Windows, add a .py extension and an .exe launcher
+ """
+ if type_ == 'gui':
+ launcher_type = 'gui'
+ ext = '-script.pyw'
+ old = ['.pyw']
+ else:
+ launcher_type = 'cli'
+ ext = '-script.py'
+ old = ['.py', '.pyc', '.pyo']
+ hdr = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield (name + ext, hdr + script_text, 't', blockers)
+ yield (
+ name + '.exe', get_win_launcher(launcher_type),
+ 'b' # write in binary mode
+ )
+ if not is_64bit():
+ # install a manifest for the launcher to prevent Windows
+ # from detecting it as an installer (which it will for
+ # launchers like easy_install.exe). Consider only
+ # adding a manifest for launchers detected as installers.
+ # See Distribute #143 for details.
+ m_name = name + '.exe.manifest'
+ yield (m_name, load_launcher_manifest(name), 't')
+
+
+# for backward-compatibility
+get_script_args = ScriptWriter.get_script_args
+get_script_header = ScriptWriter.get_script_header
+
+
+def get_win_launcher(type):
+ """
+ Load the Windows launcher (executable) suitable for launching a script.
+
+ `type` should be either 'cli' or 'gui'
+
+ Returns the executable as a byte string.
+ """
+ launcher_fn = '%s.exe' % type
+ if is_64bit():
+ launcher_fn = launcher_fn.replace(".", "-64.")
+ else:
+ launcher_fn = launcher_fn.replace(".", "-32.")
+ return resource_string('setuptools', launcher_fn)
+
+
+def load_launcher_manifest(name):
+ manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
+ if six.PY2:
+ return manifest % vars()
+ else:
+ return manifest.decode('utf-8') % vars()
+
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+ return shutil.rmtree(path, ignore_errors, onerror)
+
+
+def current_umask():
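+ # os.umask() both sets and returns the mask: setting a throwaway value
+ # yields the current mask, which is then immediately restored.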
+ tmp = os.umask(0o022)
+ os.umask(tmp)
+ return tmp
+
+
+def bootstrap():
+ # This function is called when setuptools*.egg is run using /bin/sh
+ import setuptools
+
+ argv0 = os.path.dirname(setuptools.__path__[0])
+ sys.argv[0] = argv0
+ sys.argv.append(argv0)
+ main()
+
+
+def main(argv=None, **kw):
+ from setuptools import setup
+ from setuptools.dist import Distribution
+
+ class DistributionWithoutHelpCommands(Distribution):
+ common_usage = ""
+
+ def _show_help(self, *args, **kw):
+ with _patch_usage():
+ Distribution._show_help(self, *args, **kw)
+
+ if argv is None:
+ argv = sys.argv[1:]
+
+ with _patch_usage():
+ setup(
+ script_args=['-q', 'easy_install', '-v'] + argv,
+ script_name=sys.argv[0] or 'easy_install',
+ distclass=DistributionWithoutHelpCommands,
+ **kw
+ )
+
+
+@contextlib.contextmanager
+def _patch_usage():
+ import distutils.core
+ USAGE = textwrap.dedent("""
+ usage: %(script)s [options] requirement_or_url ...
+ or: %(script)s --help
+ """).lstrip()
+
+ def gen_usage(script_name):
+ return USAGE % dict(
+ script=os.path.basename(script_name),
+ )
+
+ saved = distutils.core.gen_usage
+ distutils.core.gen_usage = gen_usage
+ try:
+ yield
+ finally:
+ distutils.core.gen_usage = saved
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
new file mode 100755
index 0000000..f3e604d
--- /dev/null
+++ b/setuptools/command/egg_info.py
@@ -0,0 +1,696 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+from distutils.filelist import FileList as _FileList
+from distutils.errors import DistutilsInternalError
+from distutils.util import convert_path
+from distutils import log
+import distutils.errors
+import distutils.filelist
+import os
+import re
+import sys
+import io
+import warnings
+import time
+import collections
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map
+
+from setuptools import Command
+from setuptools.command.sdist import sdist
+from setuptools.command.sdist import walk_revctrl
+from setuptools.command.setopt import edit_config
+from setuptools.command import bdist_egg
+from pkg_resources import (
+ parse_requirements, safe_name, parse_version,
+ safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
+import setuptools.unicode_utils as unicode_utils
+from setuptools.glob import glob
+
+from setuptools.extern import packaging
+
+
+def translate_pattern(glob):
+ """
+ Translate a file path glob like '*.txt' into a regular expression.
+ This differs from fnmatch.translate, which allows wildcards to match
+ directory separators. It also knows about '**/' which matches any number of
+ directories.
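+
+ Illustrative examples, assuming a POSIX os.sep of '/':
+
+ >>> bool(translate_pattern('*.txt').match('a.txt'))
+ True
+ >>> bool(translate_pattern('*.txt').match('dir/a.txt'))
+ False
+ >>> bool(translate_pattern('**/*.txt').match('dir/sub/a.txt'))
+ True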
+ """
+ pat = ''
+
+ # This will split on '/' within [character classes]. This is deliberate.
+ chunks = glob.split(os.path.sep)
+
+ sep = re.escape(os.sep)
+ valid_char = '[^%s]' % (sep,)
+
+ for c, chunk in enumerate(chunks):
+ last_chunk = c == len(chunks) - 1
+
+ # Chunks that are a literal ** are globstars. They match anything.
+ if chunk == '**':
+ if last_chunk:
+ # Match anything if this is the last component
+ pat += '.*'
+ else:
+ # Match '(name/)*'
+ pat += '(?:%s+%s)*' % (valid_char, sep)
+ continue # The whole path component has been handled; move to the next
+
+ # Find any special characters in the remainder
+ i = 0
+ chunk_len = len(chunk)
+ while i < chunk_len:
+ char = chunk[i]
+ if char == '*':
+ # Match any number of name characters
+ pat += valid_char + '*'
+ elif char == '?':
+ # Match a name character
+ pat += valid_char
+ elif char == '[':
+ # Character class
+ inner_i = i + 1
+ # Skip initial !/] chars
+ if inner_i < chunk_len and chunk[inner_i] == '!':
+ inner_i = inner_i + 1
+ if inner_i < chunk_len and chunk[inner_i] == ']':
+ inner_i = inner_i + 1
+
+ # Loop till the closing ] is found
+ while inner_i < chunk_len and chunk[inner_i] != ']':
+ inner_i = inner_i + 1
+
+ if inner_i >= chunk_len:
+ # Got to the end of the string without finding a closing ]
+ # Do not treat this as a matching group, but as a literal [
+ pat += re.escape(char)
+ else:
+ # Grab the insides of the [brackets]
+ inner = chunk[i + 1:inner_i]
+ char_class = ''
+
+ # Class negation
+ if inner[0] == '!':
+ char_class = '^'
+ inner = inner[1:]
+
+ char_class += re.escape(inner)
+ pat += '[%s]' % (char_class,)
+
+ # Skip to the end ]
+ i = inner_i
+ else:
+ pat += re.escape(char)
+ i += 1
+
+ # Join each chunk with the dir separator
+ if not last_chunk:
+ pat += sep
+
+ pat += r'\Z'
+ return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
+
+
+class egg_info(Command):
+ description = "create a distribution's .egg-info directory"
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"),
+ ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+ ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+ ('no-date', 'D', "Don't include date stamp [default]"),
+ ]
+
+ boolean_options = ['tag-date']
+ negative_opt = {
+ 'no-date': 'tag-date',
+ }
+
+ def initialize_options(self):
+ self.egg_name = None
+ self.egg_version = None
+ self.egg_base = None
+ self.egg_info = None
+ self.tag_build = None
+ self.tag_date = 0
+ self.broken_egg_info = False
+ self.vtags = None
+
+ ####################################
+ # allow the 'tag_svn_revision' to be detected and
+ # set, supporting sdists built on older Setuptools.
+ @property
+ def tag_svn_revision(self):
+ pass
+
+ @tag_svn_revision.setter
+ def tag_svn_revision(self, value):
+ pass
+ ####################################
+
+ def save_version_info(self, filename):
+ """
+ Materialize the value of date into the
+ build tag. Install build keys in a deterministic order
+ to avoid arbitrary reordering on subsequent builds.
+ """
+ egg_info = collections.OrderedDict()
+ # follow the order these keys would have been added
+ # when PYTHONHASHSEED=0
+ egg_info['tag_build'] = self.tags()
+ egg_info['tag_date'] = 0
+ edit_config(filename, dict(egg_info=egg_info))
+
+ def finalize_options(self):
+ self.egg_name = safe_name(self.distribution.get_name())
+ self.vtags = self.tags()
+ self.egg_version = self.tagged_version()
+
+ parsed_version = parse_version(self.egg_version)
+
+ try:
+ is_version = isinstance(parsed_version, packaging.version.Version)
+ spec = (
+ "%s==%s" if is_version else "%s===%s"
+ )
+ list(
+ parse_requirements(spec % (self.egg_name, self.egg_version))
+ )
+ except ValueError:
+ raise distutils.errors.DistutilsOptionError(
+ "Invalid distribution name or version syntax: %s-%s" %
+ (self.egg_name, self.egg_version)
+ )
+
+ if self.egg_base is None:
+ dirs = self.distribution.package_dir
+ self.egg_base = (dirs or {}).get('', os.curdir)
+
+ self.ensure_dirname('egg_base')
+ self.egg_info = to_filename(self.egg_name) + '.egg-info'
+ if self.egg_base != os.curdir:
+ self.egg_info = os.path.join(self.egg_base, self.egg_info)
+ if '-' in self.egg_name:
+ self.check_broken_egg_info()
+
+ # Set package version for the benefit of dumber commands
+ # (e.g. sdist, bdist_wininst, etc.)
+ #
+ self.distribution.metadata.version = self.egg_version
+
+ # If we bootstrapped around the lack of a PKG-INFO, as might be the
+ # case in a fresh checkout, make sure that any special tags get added
+ # to the version info
+ #
+ pd = self.distribution._patched_dist
+ if pd is not None and pd.key == self.egg_name.lower():
+ pd._version = self.egg_version
+ pd._parsed_version = parse_version(self.egg_version)
+ self.distribution._patched_dist = None
+
+ def write_or_delete_file(self, what, filename, data, force=False):
+ """Write `data` to `filename` or delete if empty
+
+ If `data` is non-empty, this routine is the same as ``write_file()``.
+ If `data` is empty but not ``None``, this is the same as calling
+ ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
+ unless `filename` exists, in which case a warning is issued about the
+ orphaned file (if `force` is false), or the file is deleted (if `force`
+ is true).
+ """
+ if data:
+ self.write_file(what, filename, data)
+ elif os.path.exists(filename):
+ if data is None and not force:
+ log.warn(
+ "%s not set in setup(), but %s exists", what, filename
+ )
+ return
+ else:
+ self.delete_file(filename)
+
+ def write_file(self, what, filename, data):
+ """Write `data` to `filename` (if not a dry run) after announcing it
+
+ `what` is used in a log message to identify what is being written
+ to the file.
+ """
+ log.info("writing %s to %s", what, filename)
+ if six.PY3:
+ data = data.encode("utf-8")
+ if not self.dry_run:
+ with open(filename, 'wb') as f:
+ f.write(data)
+
+ def delete_file(self, filename):
+ """Delete `filename` (if not a dry run) after announcing it"""
+ log.info("deleting %s", filename)
+ if not self.dry_run:
+ os.unlink(filename)
+
+ def tagged_version(self):
+ version = self.distribution.get_version()
+ # egg_info may be called more than once for a distribution,
+ # in which case the version string already contains all tags.
+ if self.vtags and version.endswith(self.vtags):
+ return safe_version(version)
+ return safe_version(version + self.vtags)
+
+ def run(self):
+ self.mkpath(self.egg_info)
+ installer = self.distribution.fetch_build_egg
+ for ep in iter_entry_points('egg_info.writers'):
+ ep.require(installer=installer)
+ writer = ep.resolve()
+ writer(self, ep.name, os.path.join(self.egg_info, ep.name))
+
+ # Get rid of native_libs.txt if it was put there by older bdist_egg
+ nl = os.path.join(self.egg_info, "native_libs.txt")
+ if os.path.exists(nl):
+ self.delete_file(nl)
+
+ self.find_sources()
+
+ def tags(self):
+ version = ''
+ if self.tag_build:
+ version += self.tag_build
+ if self.tag_date:
+ version += time.strftime("-%Y%m%d")
+ return version
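+
+ # Illustrative (not upstream documentation): with tag_build='.dev' and
+ # tag_date set, tags() returns something like '.dev-20180524', which
+ # tagged_version() appends to the base version before safe_version()
+ # normalizes it.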
+
+ def find_sources(self):
+ """Generate SOURCES.txt manifest file"""
+ manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+ mm = manifest_maker(self.distribution)
+ mm.manifest = manifest_filename
+ mm.run()
+ self.filelist = mm.filelist
+
+ def check_broken_egg_info(self):
+ bei = self.egg_name + '.egg-info'
+ if self.egg_base != os.curdir:
+ bei = os.path.join(self.egg_base, bei)
+ if os.path.exists(bei):
+ log.warn(
+ "-" * 78 + '\n'
+ "Note: Your current .egg-info directory has a '-' in its name;"
+ '\nthis will not work correctly with "setup.py develop".\n\n'
+ 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
+ bei, self.egg_info
+ )
+ self.broken_egg_info = self.egg_info
+ self.egg_info = bei # make it work for now
+
+
+class FileList(_FileList):
+ # Implementations of the various MANIFEST.in commands
+
+ def process_template_line(self, line):
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dir_pattern).
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ self.debug_print("include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.include(pattern):
+ log.warn("warning: no files found matching '%s'", pattern)
+
+ elif action == 'exclude':
+ self.debug_print("exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.exclude(pattern):
+ log.warn(("warning: no previously-included files "
+ "found matching '%s'"), pattern)
+
+ elif action == 'global-include':
+ self.debug_print("global-include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.global_include(pattern):
+ log.warn(("warning: no files found matching '%s' "
+ "anywhere in distribution"), pattern)
+
+ elif action == 'global-exclude':
+ self.debug_print("global-exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.global_exclude(pattern):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found anywhere in distribution"),
+ pattern)
+
+ elif action == 'recursive-include':
+ self.debug_print("recursive-include %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.recursive_include(dir, pattern):
+ log.warn(("warning: no files found matching '%s' "
+ "under directory '%s'"),
+ pattern, dir)
+
+ elif action == 'recursive-exclude':
+ self.debug_print("recursive-exclude %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.recursive_exclude(dir, pattern):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found under directory '%s'"),
+ pattern, dir)
+
+ elif action == 'graft':
+ self.debug_print("graft " + dir_pattern)
+ if not self.graft(dir_pattern):
+ log.warn("warning: no directories found matching '%s'",
+ dir_pattern)
+
+ elif action == 'prune':
+ self.debug_print("prune " + dir_pattern)
+ if not self.prune(dir_pattern):
+ log.warn(("no previously-included directories found "
+ "matching '%s'"), dir_pattern)
+
+ else:
+ raise DistutilsInternalError(
+ "this cannot happen: invalid action '%s'" % action)
+
+ def _remove_files(self, predicate):
+ """
+ Remove all files from the file list that match the predicate.
+ Return True if any matching files were removed
+ """
+ found = False
+ for i in range(len(self.files) - 1, -1, -1):
+ if predicate(self.files[i]):
+ self.debug_print(" removing " + self.files[i])
+ del self.files[i]
+ found = True
+ return found
+
+ def include(self, pattern):
+ """Include files that match 'pattern'."""
+ found = [f for f in glob(pattern) if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def exclude(self, pattern):
+ """Exclude files that match 'pattern'."""
+ match = translate_pattern(pattern)
+ return self._remove_files(match.match)
+
+ def recursive_include(self, dir, pattern):
+ """
+ Include all files anywhere in 'dir/' that match the pattern.
+ """
+ full_pattern = os.path.join(dir, '**', pattern)
+ found = [f for f in glob(full_pattern, recursive=True)
+ if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def recursive_exclude(self, dir, pattern):
+ """
+ Exclude any file anywhere in 'dir/' that matches the pattern.
+ """
+ match = translate_pattern(os.path.join(dir, '**', pattern))
+ return self._remove_files(match.match)
+
+ def graft(self, dir):
+ """Include all files from 'dir/'."""
+ found = [
+ item
+ for match_dir in glob(dir)
+ for item in distutils.filelist.findall(match_dir)
+ ]
+ self.extend(found)
+ return bool(found)
+
+ def prune(self, dir):
+ """Filter out files from 'dir/'."""
+ match = translate_pattern(os.path.join(dir, '**'))
+ return self._remove_files(match.match)
+
+ def global_include(self, pattern):
+ """
+ Include all files anywhere in the current directory that match the
+ pattern. This is very inefficient on large file trees.
+ """
+ if self.allfiles is None:
+ self.findall()
+ match = translate_pattern(os.path.join('**', pattern))
+ found = [f for f in self.allfiles if match.match(f)]
+ self.extend(found)
+ return bool(found)
+
+ def global_exclude(self, pattern):
+ """
+ Exclude all files anywhere that match the pattern.
+ """
+ match = translate_pattern(os.path.join('**', pattern))
+ return self._remove_files(match.match)
+
+ def append(self, item):
+ if item.endswith('\r'): # Fix older sdists built on Windows
+ item = item[:-1]
+ path = convert_path(item)
+
+ if self._safe_path(path):
+ self.files.append(path)
+
+ def extend(self, paths):
+ self.files.extend(filter(self._safe_path, paths))
+
+ def _repair(self):
+ """
+ Replace self.files with only safe paths
+
+ Because some owners of FileList manipulate the underlying
+ ``files`` attribute directly, this method must be called to
+ repair those paths.
+ """
+ self.files = list(filter(self._safe_path, self.files))
+
+ def _safe_path(self, path):
+ enc_warn = "'%s' not %s encodable -- skipping"
+
+ # To avoid accidental transcoding errors, decode to unicode first
+ u_path = unicode_utils.filesys_decode(path)
+ if u_path is None:
+ log.warn("'%s' in unexpected encoding -- skipping" % path)
+ return False
+
+ # Must ensure utf-8 encodability
+ utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+ if utf8_path is None:
+ log.warn(enc_warn, path, 'utf-8')
+ return False
+
+ try:
+ # accept if either way checks out
+ if os.path.exists(u_path) or os.path.exists(utf8_path):
+ return True
+ # this will catch any encode errors decoding u_path
+ except UnicodeEncodeError:
+ log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
+class manifest_maker(sdist):
+ template = "MANIFEST.in"
+
+ def initialize_options(self):
+ self.use_defaults = 1
+ self.prune = 1
+ self.manifest_only = 1
+ self.force_manifest = 1
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self.filelist = FileList()
+ if not os.path.exists(self.manifest):
+ self.write_manifest() # it must exist so it'll get in the list
+ self.add_defaults()
+ if os.path.exists(self.template):
+ self.read_template()
+ self.prune_file_list()
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ self.write_manifest()
+
+ def _manifest_normalize(self, path):
+ path = unicode_utils.filesys_decode(path)
+ return path.replace(os.sep, '/')
+
+ def write_manifest(self):
+ """
+ Write the file list in 'self.filelist' to the manifest file
+ named by 'self.manifest'.
+ """
+ self.filelist._repair()
+
+ # _repair() has ensured the paths are encodable; now decode and normalize
+ files = [self._manifest_normalize(f) for f in self.filelist.files]
+ msg = "writing manifest file '%s'" % self.manifest
+ self.execute(write_file, (self.manifest, files), msg)
+
+ def warn(self, msg):
+ if not self._should_suppress_warning(msg):
+ sdist.warn(self, msg)
+
+ @staticmethod
+ def _should_suppress_warning(msg):
+ """
+ suppress missing-file warnings from sdist
+ """
+ return re.match(r"standard file .*not found", msg)
+
+ def add_defaults(self):
+ sdist.add_defaults(self)
+ self.filelist.append(self.template)
+ self.filelist.append(self.manifest)
+ rcfiles = list(walk_revctrl())
+ if rcfiles:
+ self.filelist.extend(rcfiles)
+ elif os.path.exists(self.manifest):
+ self.read_manifest()
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist.graft(ei_cmd.egg_info)
+
+ def prune_file_list(self):
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+ self.filelist.prune(build.build_base)
+ self.filelist.prune(base_dir)
+ sep = re.escape(os.sep)
+ self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+ is_regex=1)
+
+
+def write_file(filename, contents):
+ """Create a file with the specified name and write 'contents' (a
+ sequence of strings without line terminators) to it.
+ """
+ contents = "\n".join(contents)
+
+ # assuming the contents have been vetted for utf-8 encoding
+ contents = contents.encode("utf-8")
+
+ with open(filename, "wb") as f: # always write POSIX-style manifest
+ f.write(contents)
+
+
+def write_pkg_info(cmd, basename, filename):
+ log.info("writing %s", filename)
+ if not cmd.dry_run:
+ metadata = cmd.distribution.metadata
+ metadata.version, oldver = cmd.egg_version, metadata.version
+ metadata.name, oldname = cmd.egg_name, metadata.name
+
+ try:
+ # write unescaped data to PKG-INFO, so older pkg_resources
+ # can still parse it
+ metadata.write_pkg_info(cmd.egg_info)
+ finally:
+ metadata.name, metadata.version = oldname, oldver
+
+ safe = getattr(cmd.distribution, 'zip_safe', None)
+
+ bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+
+def warn_depends_obsolete(cmd, basename, filename):
+ if os.path.exists(filename):
+ log.warn(
+ "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+
+def _write_requirements(stream, reqs):
+ lines = yield_lines(reqs or ())
+ lines = (line + '\n' for line in lines)
+ stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+ dist = cmd.distribution
+ data = six.StringIO()
+ _write_requirements(data, dist.install_requires)
+ extras_require = dist.extras_require or {}
+ for extra in sorted(extras_require):
+ data.write('\n[{extra}]\n'.format(**vars()))
+ _write_requirements(data, extras_require[extra])
+ cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
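+# Illustrative requires.txt layout produced above (a sketch; section names
+# and pins are hypothetical):
+#
+# six>=1.10
+#
+# [docs]
+# sphinx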
+
+def write_setup_requirements(cmd, basename, filename):
+ data = io.StringIO()
+ _write_requirements(data, cmd.distribution.setup_requires)
+ cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+ pkgs = dict.fromkeys(
+ [
+ k.split('.', 1)[0]
+ for k in cmd.distribution.iter_distribution_names()
+ ]
+ )
+ cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename):
+ write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+ argname = os.path.splitext(basename)[0]
+ value = getattr(cmd.distribution, argname, None)
+ if value is not None:
+ value = '\n'.join(value) + '\n'
+ cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+ ep = cmd.distribution.entry_points
+
+ if isinstance(ep, six.string_types) or ep is None:
+ data = ep
+ else:
+ data = []
+ for section, contents in sorted(ep.items()):
+ if not isinstance(contents, six.string_types):
+ contents = EntryPoint.parse_group(section, contents)
+ contents = '\n'.join(sorted(map(str, contents.values())))
+ data.append('[%s]\n%s\n\n' % (section, contents))
+ data = ''.join(data)
+
+ cmd.write_or_delete_file('entry points', filename, data, True)
+
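+# Illustrative entry_points.txt produced by write_entries (a sketch; the
+# script and module names are hypothetical):
+#
+# [console_scripts]
+# mytool = mypkg.cli:main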
+
+def get_pkg_info_revision():
+ """
+ Get a -r### off of PKG-INFO Version in case this is an sdist of
+ a subversion revision.
+ """
+ warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
+ if os.path.exists('PKG-INFO'):
+ with io.open('PKG-INFO') as f:
+ for line in f:
+ match = re.match(r"Version:.*-r(\d+)\s*$", line)
+ if match:
+ return int(match.group(1))
+ return 0
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
new file mode 100644
index 0000000..31a5ddb
--- /dev/null
+++ b/setuptools/command/install.py
@@ -0,0 +1,125 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now. See https://github.com/pypa/setuptools/issues/199/
+_install = orig.install
+
+
+class install(orig.install):
+ """Use easy_install to install the package, w/dependencies"""
+
+ user_options = orig.install.user_options + [
+ ('old-and-unmanageable', None, "Try not to use this!"),
+ ('single-version-externally-managed', None,
+ "used by system package builders to create 'flat' eggs"),
+ ]
+ boolean_options = orig.install.boolean_options + [
+ 'old-and-unmanageable', 'single-version-externally-managed',
+ ]
+ new_commands = [
+ ('install_egg_info', lambda self: True),
+ ('install_scripts', lambda self: True),
+ ]
+ _nc = dict(new_commands)
+
+ def initialize_options(self):
+ orig.install.initialize_options(self)
+ self.old_and_unmanageable = None
+ self.single_version_externally_managed = None
+
+ def finalize_options(self):
+ orig.install.finalize_options(self)
+ if self.root:
+ self.single_version_externally_managed = True
+ elif self.single_version_externally_managed:
+ if not self.root and not self.record:
+ raise DistutilsArgError(
+ "You must specify --record or --root when building system"
+ " packages"
+ )
+
+ def handle_extra_path(self):
+ if self.root or self.single_version_externally_managed:
+ # explicit backward-compatibility mode, allow extra_path to work
+ return orig.install.handle_extra_path(self)
+
+ # Ignore extra_path when installing an egg (or being run by another
+ # command without --root or --single-version-externally-managed)
+ self.path_file = None
+ self.extra_dirs = ''
+
+ def run(self):
+ # Explicit request for old-style install? Just do it
+ if self.old_and_unmanageable or self.single_version_externally_managed:
+ return orig.install.run(self)
+
+ if not self._called_from_setup(inspect.currentframe()):
+ # Run in backward-compatibility mode to support bdist_* commands.
+ orig.install.run(self)
+ else:
+ self.do_egg_install()
+
+ @staticmethod
+ def _called_from_setup(run_frame):
+ """
+ Attempt to detect whether run() was called from setup() or by another
+ command. If called by setup(), the parent caller will be the
+ 'run_command' method in 'distutils.dist', and *its* caller will be
+ the 'run_commands' method. If called any other way, the
+ immediate caller *might* be 'run_command', but it won't have been
+ called by 'run_commands'. Return True in that case or if a call stack
+ is unavailable. Return False otherwise.
+ """
+ if run_frame is None:
+ msg = "Call stack not available. bdist_* commands may fail."
+ warnings.warn(msg)
+ if platform.python_implementation() == 'IronPython':
+ msg = "For best results, pass -X:Frames to enable call stack."
+ warnings.warn(msg)
+ return True
+ res = inspect.getouterframes(run_frame)[2]
+ caller, = res[:1]
+ info = inspect.getframeinfo(caller)
+ caller_module = caller.f_globals.get('__name__', '')
+ return (
+ caller_module == 'distutils.dist'
+ and info.function == 'run_commands'
+ )
+
+ def do_egg_install(self):
+
+ easy_install = self.distribution.get_command_class('easy_install')
+
+ cmd = easy_install(
+ self.distribution, args="x", root=self.root, record=self.record,
+ )
+ cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
+ cmd.always_copy_from = '.' # make sure local-dir eggs get installed
+
+ # pick up setup-dir .egg files only: no .egg-info
+ cmd.package_index.scan(glob.glob('*.egg'))
+
+ self.run_command('bdist_egg')
+ args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+ if setuptools.bootstrap_install_from:
+ # Bootstrap self-installation of setuptools
+ args.insert(0, setuptools.bootstrap_install_from)
+
+ cmd.args = args
+ cmd.run()
+ setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+ [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
+ install.new_commands
+)
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
new file mode 100755
index 0000000..edc4718
--- /dev/null
+++ b/setuptools/command/install_egg_info.py
@@ -0,0 +1,62 @@
+from distutils import log, dir_util
+import os
+
+from setuptools import Command
+from setuptools import namespaces
+from setuptools.archive_util import unpack_archive
+import pkg_resources
+
+
+class install_egg_info(namespaces.Installer, Command):
+ """Install an .egg-info directory for the package"""
+
+ description = "Install an .egg-info directory for the package"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ]
+
+ def initialize_options(self):
+ self.install_dir = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install_lib',
+ ('install_dir', 'install_dir'))
+ ei_cmd = self.get_finalized_command("egg_info")
+ basename = pkg_resources.Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version
+ ).egg_name() + '.egg-info'
+ self.source = ei_cmd.egg_info
+ self.target = os.path.join(self.install_dir, basename)
+ self.outputs = []
+
+ def run(self):
+ self.run_command('egg_info')
+ if os.path.isdir(self.target) and not os.path.islink(self.target):
+ dir_util.remove_tree(self.target, dry_run=self.dry_run)
+ elif os.path.exists(self.target):
+ self.execute(os.unlink, (self.target,), "Removing " + self.target)
+ if not self.dry_run:
+ pkg_resources.ensure_directory(self.target)
+ self.execute(
+ self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+ )
+ self.install_namespaces()
+
+ def get_outputs(self):
+ return self.outputs
+
+ def copytree(self):
+ # Copy the .egg-info tree to site-packages
+ def skimmer(src, dst):
+ # filter out source-control directories; note that 'src' is always
+ # a '/'-separated path, regardless of platform. 'dst' is a
+ # platform-specific path.
+ for skip in '.svn/', 'CVS/':
+ if src.startswith(skip) or '/' + skip in src:
+ return None
+ self.outputs.append(dst)
+ log.debug("Copying %s to %s", src, dst)
+ return dst
+
+ unpack_archive(self.source, self.target, skimmer)
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
new file mode 100644
index 0000000..2b31c3e
--- /dev/null
+++ b/setuptools/command/install_lib.py
@@ -0,0 +1,121 @@
+import os
+import imp
+from itertools import product, starmap
+import distutils.command.install_lib as orig
+
+
+class install_lib(orig.install_lib):
+ """Don't add compiled flags to filenames of non-Python files"""
+
+ def run(self):
+ self.build()
+ outfiles = self.install()
+ if outfiles is not None:
+ # always compile, in case we have any extension stubs to deal with
+ self.byte_compile(outfiles)
+
+ def get_exclusions(self):
+ """
+ Return a collections.Sized collections.Container of paths to be
+ excluded for single_version_externally_managed installations.
+ """
+ all_packages = (
+ pkg
+ for ns_pkg in self._get_SVEM_NSPs()
+ for pkg in self._all_packages(ns_pkg)
+ )
+
+ excl_specs = product(all_packages, self._gen_exclusion_paths())
+ return set(starmap(self._exclude_pkg_path, excl_specs))
+
+ def _exclude_pkg_path(self, pkg, exclusion_path):
+ """
+ Given a package name and exclusion path within that package,
+ compute the full exclusion path.
+ """
+ parts = pkg.split('.') + [exclusion_path]
+ return os.path.join(self.install_dir, *parts)
+
+ @staticmethod
+ def _all_packages(pkg_name):
+ """
+ >>> list(install_lib._all_packages('foo.bar.baz'))
+ ['foo.bar.baz', 'foo.bar', 'foo']
+ """
+ while pkg_name:
+ yield pkg_name
+ pkg_name, sep, child = pkg_name.rpartition('.')
+
+ def _get_SVEM_NSPs(self):
+ """
+ Get namespace packages (list) but only for
+ single_version_externally_managed installations and empty otherwise.
+ """
+ # TODO: is it necessary to short-circuit here? i.e. what's the cost
+ # if get_finalized_command is called even when namespace_packages is
+ # False?
+ if not self.distribution.namespace_packages:
+ return []
+
+ install_cmd = self.get_finalized_command('install')
+ svem = install_cmd.single_version_externally_managed
+
+ return self.distribution.namespace_packages if svem else []
+
+ @staticmethod
+ def _gen_exclusion_paths():
+ """
+ Generate file paths to be excluded for namespace packages (bytecode
+ cache files).
+ """
+ # always exclude the package module itself
+ yield '__init__.py'
+
+ yield '__init__.pyc'
+ yield '__init__.pyo'
+
+ if not hasattr(imp, 'get_tag'):
+ return
+
+ base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
+ yield base + '.pyc'
+ yield base + '.pyo'
+ yield base + '.opt-1.pyc'
+ yield base + '.opt-2.pyc'
+
+ def copy_tree(
+ self, infile, outfile,
+ preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+ ):
+ assert preserve_mode and preserve_times and not preserve_symlinks
+ exclude = self.get_exclusions()
+
+ if not exclude:
+ return orig.install_lib.copy_tree(self, infile, outfile)
+
+ # Exclude namespace package __init__.py* files from the output
+
+ from setuptools.archive_util import unpack_directory
+ from distutils import log
+
+ outfiles = []
+
+ def pf(src, dst):
+ if dst in exclude:
+ log.warn("Skipping installation of %s (namespace package)",
+ dst)
+ return False
+
+ log.info("copying %s -> %s", src, os.path.dirname(dst))
+ outfiles.append(dst)
+ return dst
+
+ unpack_directory(infile, outfile, pf)
+ return outfiles
+
+ def get_outputs(self):
+ outputs = orig.install_lib.get_outputs(self)
+ exclude = self.get_exclusions()
+ if exclude:
+ return [f for f in outputs if f not in exclude]
+ return outputs
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
new file mode 100755
index 0000000..1623427
--- /dev/null
+++ b/setuptools/command/install_scripts.py
@@ -0,0 +1,65 @@
+from distutils import log
+import distutils.command.install_scripts as orig
+import os
+import sys
+
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+
+
+class install_scripts(orig.install_scripts):
+ """Do normal script install, plus any egg_info wrapper scripts"""
+
+ def initialize_options(self):
+ orig.install_scripts.initialize_options(self)
+ self.no_ep = False
+
+ def run(self):
+ import setuptools.command.easy_install as ei
+
+ self.run_command("egg_info")
+ if self.distribution.scripts:
+ orig.install_scripts.run(self) # run first to set up self.outfiles
+ else:
+ self.outfiles = []
+ if self.no_ep:
+ # don't install entry point scripts into .egg file!
+ return
+
+ ei_cmd = self.get_finalized_command("egg_info")
+ dist = Distribution(
+ ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+ ei_cmd.egg_name, ei_cmd.egg_version,
+ )
+ bs_cmd = self.get_finalized_command('build_scripts')
+ exec_param = getattr(bs_cmd, 'executable', None)
+ bw_cmd = self.get_finalized_command("bdist_wininst")
+ is_wininst = getattr(bw_cmd, '_is_running', False)
+ writer = ei.ScriptWriter
+ if is_wininst:
+ exec_param = "python.exe"
+ writer = ei.WindowsScriptWriter
+ if exec_param == sys.executable:
+ # In case the path to the Python executable contains a space, wrap
+ # it so it's not split up.
+ exec_param = [exec_param]
+ # resolve the writer to the environment
+ writer = writer.best()
+ cmd = writer.command_spec_class.best().from_param(exec_param)
+ for args in writer.get_args(dist, cmd.as_header()):
+ self.write_script(*args)
+
+ def write_script(self, script_name, contents, mode="t", *ignored):
+ """Write an executable file to the scripts directory"""
+ from setuptools.command.easy_install import chmod, current_umask
+
+ log.info("Installing %s script to %s", script_name, self.install_dir)
+ target = os.path.join(self.install_dir, script_name)
+ self.outfiles.append(target)
+
+ mask = current_umask()
+ if not self.dry_run:
+ ensure_directory(target)
+ with open(target, "w" + mode) as f:
+ f.write(contents)
+ chmod(target, 0o777 - mask)
diff --git a/setuptools/command/launcher manifest.xml b/setuptools/command/launcher manifest.xml
new file mode 100644
index 0000000..5972a96
--- /dev/null
+++ b/setuptools/command/launcher manifest.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%(name)s"
+ type="win32"/>
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>
diff --git a/setuptools/command/py36compat.py b/setuptools/command/py36compat.py
new file mode 100644
index 0000000..61063e7
--- /dev/null
+++ b/setuptools/command/py36compat.py
@@ -0,0 +1,136 @@
+import os
+from glob import glob
+from distutils.util import convert_path
+from distutils.command import sdist
+
+from setuptools.extern.six.moves import filter
+
+
+class sdist_add_defaults:
+ """
+ Mix-in providing forward-compatibility for functionality as found in
+ distutils on Python 3.7.
+
+ Do not edit the code in this class except to update functionality
+ as implemented in distutils. Instead, override in the subclass.
+ """
+
+ def add_defaults(self):
+ """Add all the default files to self.filelist:
+ - README or README.txt
+ - setup.py
+ - test/test*.py
+ - all pure Python modules mentioned in setup script
+ - all files pointed by package_data (build_py)
+ - all files defined in data_files.
+ - all files defined as scripts.
+ - all C sources listed as part of extensions or C libraries
+ in the setup script (doesn't catch C headers!)
+ Warns if (README or README.txt) or setup.py are missing; everything
+ else is optional.
+ """
+ self._add_defaults_standards()
+ self._add_defaults_optional()
+ self._add_defaults_python()
+ self._add_defaults_data_files()
+ self._add_defaults_ext()
+ self._add_defaults_c_libs()
+ self._add_defaults_scripts()
+
+ @staticmethod
+ def _cs_path_exists(fspath):
+ """
+ Case-sensitive path existence check
+
+ >>> sdist_add_defaults._cs_path_exists(__file__)
+ True
+ >>> sdist_add_defaults._cs_path_exists(__file__.upper())
+ False
+ """
+ if not os.path.exists(fspath):
+ return False
+ # make absolute so we always have a directory
+ abspath = os.path.abspath(fspath)
+ directory, filename = os.path.split(abspath)
+ return filename in os.listdir(directory)
+
+ def _add_defaults_standards(self):
+ standards = [self.READMES, self.distribution.script_name]
+ for fn in standards:
+ if isinstance(fn, tuple):
+ alts = fn
+ got_it = False
+ for fn in alts:
+ if self._cs_path_exists(fn):
+ got_it = True
+ self.filelist.append(fn)
+ break
+
+ if not got_it:
+ self.warn("standard file not found: should have one of " +
+ ', '.join(alts))
+ else:
+ if self._cs_path_exists(fn):
+ self.filelist.append(fn)
+ else:
+ self.warn("standard file '%s' not found" % fn)
+
+ def _add_defaults_optional(self):
+ optional = ['test/test*.py', 'setup.cfg']
+ for pattern in optional:
+ files = filter(os.path.isfile, glob(pattern))
+ self.filelist.extend(files)
+
+ def _add_defaults_python(self):
+ # build_py is used to get:
+ # - python modules
+ # - files defined in package_data
+ build_py = self.get_finalized_command('build_py')
+
+ # getting python files
+ if self.distribution.has_pure_modules():
+ self.filelist.extend(build_py.get_source_files())
+
+ # getting package_data files
+ # (computed in build_py.data_files by build_py.finalize_options)
+ for pkg, src_dir, build_dir, filenames in build_py.data_files:
+ for filename in filenames:
+ self.filelist.append(os.path.join(src_dir, filename))
+
+ def _add_defaults_data_files(self):
+ # getting distribution.data_files
+ if self.distribution.has_data_files():
+ for item in self.distribution.data_files:
+ if isinstance(item, str):
+ # plain file
+ item = convert_path(item)
+ if os.path.isfile(item):
+ self.filelist.append(item)
+ else:
+ # a (dirname, filenames) tuple
+ dirname, filenames = item
+ for f in filenames:
+ f = convert_path(f)
+ if os.path.isfile(f):
+ self.filelist.append(f)
+
+ def _add_defaults_ext(self):
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ self.filelist.extend(build_ext.get_source_files())
+
+ def _add_defaults_c_libs(self):
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.filelist.extend(build_clib.get_source_files())
+
+ def _add_defaults_scripts(self):
+ if self.distribution.has_scripts():
+ build_scripts = self.get_finalized_command('build_scripts')
+ self.filelist.extend(build_scripts.get_source_files())
+
+
+if hasattr(sdist.sdist, '_add_defaults_standards'):
+ # disable the functionality already available upstream
+ class sdist_add_defaults:
+ pass
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
new file mode 100755
index 0000000..8d6336a
--- /dev/null
+++ b/setuptools/command/register.py
@@ -0,0 +1,10 @@
+import distutils.command.register as orig
+
+
+class register(orig.register):
+ __doc__ = orig.register.__doc__
+
+ def run(self):
+ # Make sure that we are using valid current name/version info
+ self.run_command('egg_info')
+ orig.register.run(self)
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
new file mode 100755
index 0000000..b89353f
--- /dev/null
+++ b/setuptools/command/rotate.py
@@ -0,0 +1,66 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import shutil
+
+from setuptools.extern import six
+
+from setuptools import Command
+
+
+class rotate(Command):
+ """Delete older distributions"""
+
+ description = "delete older distributions, keeping N newest files"
+ user_options = [
+ ('match=', 'm', "patterns to match (required)"),
+ ('dist-dir=', 'd', "directory where the distributions are"),
+ ('keep=', 'k', "number of matching distributions to keep"),
+ ]
+
+ boolean_options = []
+
+ def initialize_options(self):
+ self.match = None
+ self.dist_dir = None
+ self.keep = None
+
+ def finalize_options(self):
+ if self.match is None:
+ raise DistutilsOptionError(
+ "Must specify one or more (comma-separated) match patterns "
+ "(e.g. '.zip' or '.egg')"
+ )
+ if self.keep is None:
+ raise DistutilsOptionError("Must specify number of files to keep")
+ try:
+ self.keep = int(self.keep)
+ except ValueError:
+ raise DistutilsOptionError("--keep must be an integer")
+ if isinstance(self.match, six.string_types):
+ self.match = [
+ convert_path(p.strip()) for p in self.match.split(',')
+ ]
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ def run(self):
+ self.run_command("egg_info")
+ from glob import glob
+
+ for pattern in self.match:
+ pattern = self.distribution.get_name() + '*' + pattern
+ files = glob(os.path.join(self.dist_dir, pattern))
+ files = [(os.path.getmtime(f), f) for f in files]
+ files.sort()
+ files.reverse()
+
+ log.info("%d file(s) matching %s", len(files), pattern)
+ files = files[self.keep:]
+ for (t, f) in files:
+ log.info("Deleting %s", f)
+ if not self.dry_run:
+ if os.path.isdir(f):
+ shutil.rmtree(f)
+ else:
+ os.unlink(f)
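+
+# Command-line sketch (illustrative, not upstream documentation): for a
+# project named 'mypkg' (hypothetical),
+#
+#     python setup.py rotate --match=.egg --keep=2
+#
+# keeps the two newest 'mypkg*.egg' files in the bdist dist directory
+# and deletes the rest.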
diff --git a/setuptools/command/saveopts.py b/setuptools/command/saveopts.py
new file mode 100755
index 0000000..611cec5
--- /dev/null
+++ b/setuptools/command/saveopts.py
@@ -0,0 +1,22 @@
+from setuptools.command.setopt import edit_config, option_base
+
+
+class saveopts(option_base):
+ """Save command-line options to a file"""
+
+ description = "save supplied options to setup.cfg or other config file"
+
+ def run(self):
+ dist = self.distribution
+ settings = {}
+
+ for cmd in dist.command_options:
+
+ if cmd == 'saveopts':
+ continue # don't save our own options!
+
+ for opt, (src, val) in dist.get_option_dict(cmd).items():
+ if src == "command line":
+ settings.setdefault(cmd, {})[opt] = val
+
+ edit_config(self.filename, settings, self.dry_run)
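+
+# Command-line sketch (illustrative, not upstream documentation):
+#
+#     python setup.py build --force saveopts
+#
+# records the '--force' given on the command line under [build] in
+# setup.cfg (or the config file selected via the option_base options).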
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
new file mode 100755
index 0000000..bcfae4d
--- /dev/null
+++ b/setuptools/command/sdist.py
@@ -0,0 +1,200 @@
+from distutils import log
+import distutils.command.sdist as orig
+import os
+import sys
+import io
+import contextlib
+
+from setuptools.extern import six
+
+from .py36compat import sdist_add_defaults
+
+import pkg_resources
+
+_default_revctrl = list
+
+
+def walk_revctrl(dirname=''):
+ """Find all files under revision control"""
+ for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+ for item in ep.load()(dirname):
+ yield item
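+
+# Plugin sketch (illustrative): a VCS plugin advertises its finder via the
+# 'setuptools.file_finders' entry point group, e.g. in its own setup.py:
+#
+#     entry_points={
+#         'setuptools.file_finders': [
+#             'myvcs = myvcs.finder:find_files',  # hypothetical names
+#         ],
+#     }
+#
+# where find_files(dirname) yields the paths under revision control.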
+
+
+class sdist(sdist_add_defaults, orig.sdist):
+ """Smart sdist that finds anything supported by revision control"""
+
+ user_options = [
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ]
+
+ negative_opt = {}
+
+ README_EXTENSIONS = ['', '.rst', '.txt', '.md']
+ READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
+
+ def run(self):
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist = ei_cmd.filelist
+ self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
+ self.check_readme()
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ self.make_distribution()
+
+ dist_files = getattr(self.distribution, 'dist_files', [])
+ for file in self.archive_files:
+ data = ('sdist', '', file)
+ if data not in dist_files:
+ dist_files.append(data)
+
+ def initialize_options(self):
+ orig.sdist.initialize_options(self)
+
+ self._default_to_gztar()
+
+ def _default_to_gztar(self):
+ # only needed on Python prior to 3.6.
+ if sys.version_info >= (3, 6, 0, 'beta', 1):
+ return
+ self.formats = ['gztar']
+
+ def make_distribution(self):
+ """
+ Workaround for #516
+ """
+ with self._remove_os_link():
+ orig.sdist.make_distribution(self)
+
+ @staticmethod
+ @contextlib.contextmanager
+ def _remove_os_link():
+ """
+ In a context, remove and restore os.link if it exists
+ """
+
+ class NoValue:
+ pass
+
+ orig_val = getattr(os, 'link', NoValue)
+ try:
+ del os.link
+ except Exception:
+ pass
+ try:
+ yield
+ finally:
+ if orig_val is not NoValue:
+ setattr(os, 'link', orig_val)
+
+ def __read_template_hack(self):
+ # This grody hack closes the template file (MANIFEST.in) if an
+ # exception occurs during read_template.
+ # Doing so prevents an error when easy_install attempts to delete the
+ # file.
+ try:
+ orig.sdist.read_template(self)
+ except Exception:
+ _, _, tb = sys.exc_info()
+ tb.tb_next.tb_frame.f_locals['template'].close()
+ raise
+
+ # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
+ # has been fixed, so only override the method if we're using an earlier
+ # Python.
+ has_leaky_handle = (
+ sys.version_info < (2, 7, 2)
+ or (3, 0) <= sys.version_info < (3, 1, 4)
+ or (3, 2) <= sys.version_info < (3, 2, 1)
+ )
+ if has_leaky_handle:
+ read_template = __read_template_hack
+
+ def _add_defaults_python(self):
+ """getting python files"""
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ self.filelist.extend(build_py.get_source_files())
+ # This functionality is incompatible with include_package_data, and
+ # will in fact create an infinite recursion if include_package_data
+ # is True. Use of include_package_data will imply that
+ # distutils-style automatic handling of package_data is disabled
+ if not self.distribution.include_package_data:
+ for _, src_dir, _, filenames in build_py.data_files:
+ self.filelist.extend([os.path.join(src_dir, filename)
+ for filename in filenames])
+
+ def _add_defaults_data_files(self):
+ try:
+ if six.PY2:
+ sdist_add_defaults._add_defaults_data_files(self)
+ else:
+ super()._add_defaults_data_files()
+ except TypeError:
+ log.warn("data_files contains unexpected objects")
+
+ def check_readme(self):
+ for f in self.READMES:
+ if os.path.exists(f):
+ return
+ else:
+ self.warn(
+ "standard file not found: should have one of " +
+ ', '.join(self.READMES)
+ )
+
+ def make_release_tree(self, base_dir, files):
+ orig.sdist.make_release_tree(self, base_dir, files)
+
+ # Save any egg_info command line options used to create this sdist
+ dest = os.path.join(base_dir, 'setup.cfg')
+ if hasattr(os, 'link') and os.path.exists(dest):
+ # unlink and re-copy, since it might be hard-linked, and
+ # we don't want to change the source version
+ os.unlink(dest)
+ self.copy_file('setup.cfg', dest)
+
+ self.get_finalized_command('egg_info').save_version_info(dest)
+
+ def _manifest_is_not_generated(self):
+ # check for special comment used in 2.7.1 and higher
+ if not os.path.isfile(self.manifest):
+ return False
+
+ with io.open(self.manifest, 'rb') as fp:
+ first_line = fp.readline()
+ return (first_line !=
+ '# file GENERATED by distutils, do NOT edit\n'.encode())
+
+ def read_manifest(self):
+ """Read the manifest file (named by 'self.manifest') and use it to
+ fill in 'self.filelist', the list of files to include in the source
+ distribution.
+ """
+ log.info("reading manifest file '%s'", self.manifest)
+ manifest = open(self.manifest, 'rb')
+ for line in manifest:
+ # The manifest must contain UTF-8. See #303.
+ if six.PY3:
+ try:
+ line = line.decode('UTF-8')
+ except UnicodeDecodeError:
+ log.warn("%r not UTF-8 decodable -- skipping" % line)
+ continue
+ # ignore comments and blank lines
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ self.filelist.append(line)
+ manifest.close()
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
new file mode 100755
index 0000000..7e57cc0
--- /dev/null
+++ b/setuptools/command/setopt.py
@@ -0,0 +1,149 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import distutils
+import os
+
+from setuptools.extern.six.moves import configparser
+
+from setuptools import Command
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+ """Get the filename of the distutils, local, global, or per-user config
+
+ `kind` must be one of "local", "global", or "user"
+ """
+ if kind == 'local':
+ return 'setup.cfg'
+ if kind == 'global':
+ return os.path.join(
+ os.path.dirname(distutils.__file__), 'distutils.cfg'
+ )
+ if kind == 'user':
+ dot = os.name == 'posix' and '.' or ''
+ return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+ raise ValueError(
+ "config_file() type must be 'local', 'global', or 'user'", kind
+ )
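+
+# For example, following the code above:
+#
+#     config_file('local')  -> 'setup.cfg'
+#     config_file('user')   -> expanded '~/.pydistutils.cfg' on POSIX
+#                              ('~/pydistutils.cfg' elsewhere)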
+
+
+def edit_config(filename, settings, dry_run=False):
+ """Edit a configuration file to include `settings`
+
+ `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+ command/section name. A ``None`` value means to delete the entire section,
+ while a dictionary lists settings to be changed or deleted in that section.
+ A setting of ``None`` means to delete that setting.
+ """
+ log.debug("Reading configuration from %s", filename)
+ opts = configparser.RawConfigParser()
+ opts.read([filename])
+ for section, options in settings.items():
+ if options is None:
+ log.info("Deleting section [%s] from %s", section, filename)
+ opts.remove_section(section)
+ else:
+ if not opts.has_section(section):
+ log.debug("Adding new section [%s] to %s", section, filename)
+ opts.add_section(section)
+ for option, value in options.items():
+ if value is None:
+ log.debug(
+ "Deleting %s.%s from %s",
+ section, option, filename
+ )
+ opts.remove_option(section, option)
+ if not opts.options(section):
+ log.info("Deleting empty [%s] section from %s",
+ section, filename)
+ opts.remove_section(section)
+ else:
+ log.debug(
+ "Setting %s.%s to %r in %s",
+ section, option, value, filename
+ )
+ opts.set(section, option, value)
+
+ log.info("Writing %s", filename)
+ if not dry_run:
+ with open(filename, 'w') as f:
+ opts.write(f)
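+
+# Usage sketch (illustrative; section and option names are hypothetical):
+#
+#     edit_config('setup.cfg', {
+#         'bdist': {'dist_dir': 'out'},  # set bdist.dist_dir
+#         'build': {'force': None},      # delete build.force
+#         'old_section': None,           # delete the whole section
+#     })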
+
+
+class option_base(Command):
+ """Abstract base class for commands that mess with config files"""
+
+ user_options = [
+ ('global-config', 'g',
+ "save options to the site-wide distutils.cfg file"),
+ ('user-config', 'u',
+ "save options to the current user's pydistutils.cfg file"),
+ ('filename=', 'f',
+ "configuration file to use (default=setup.cfg)"),
+ ]
+
+ boolean_options = [
+ 'global-config', 'user-config',
+ ]
+
+ def initialize_options(self):
+ self.global_config = None
+ self.user_config = None
+ self.filename = None
+
+ def finalize_options(self):
+ filenames = []
+ if self.global_config:
+ filenames.append(config_file('global'))
+ if self.user_config:
+ filenames.append(config_file('user'))
+ if self.filename is not None:
+ filenames.append(self.filename)
+ if not filenames:
+ filenames.append(config_file('local'))
+ if len(filenames) > 1:
+ raise DistutilsOptionError(
+ "Must specify only one configuration file option",
+ filenames
+ )
+ self.filename, = filenames
+
+
+class setopt(option_base):
+ """Save command-line options to a file"""
+
+ description = "set an option in setup.cfg or another config file"
+
+ user_options = [
+ ('command=', 'c', 'command to set an option for'),
+ ('option=', 'o', 'option to set'),
+ ('set-value=', 's', 'value of the option'),
+ ('remove', 'r', 'remove (unset) the value'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.command = None
+ self.option = None
+ self.set_value = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.command is None or self.option is None:
+ raise DistutilsOptionError("Must specify --command *and* --option")
+ if self.set_value is None and not self.remove:
+ raise DistutilsOptionError("Must specify --set-value or --remove")
+
+ def run(self):
+ edit_config(
+ self.filename, {
+ self.command: {self.option.replace('-', '_'): self.set_value}
+ },
+ self.dry_run
+ )
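+
+# Command-line sketch (illustrative):
+#
+#     python setup.py setopt --command bdist --option dist-dir --set-value out
+#
+# writes 'dist_dir = out' under [bdist] in setup.cfg; as run() shows,
+# '-' in the option name is stored as '_'.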
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
new file mode 100644
index 0000000..51aee1f
--- /dev/null
+++ b/setuptools/command/test.py
@@ -0,0 +1,268 @@
+import os
+import operator
+import sys
+import contextlib
+import itertools
+import unittest
+from distutils.errors import DistutilsError, DistutilsOptionError
+from distutils import log
+from unittest import TestLoader
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map, filter
+
+from pkg_resources import (resource_listdir, resource_exists, normalize_path,
+ working_set, _namespace_packages, evaluate_marker,
+ add_activation_listener, require, EntryPoint)
+from setuptools import Command
+
+
+class ScanningLoader(TestLoader):
+
+ def __init__(self):
+ TestLoader.__init__(self)
+ self._visited = set()
+
+ def loadTestsFromModule(self, module, pattern=None):
+ """Return a suite of all tests cases contained in the given module
+
+ If the module is a package, load tests from all the modules in it.
+ If the module has an ``additional_tests`` function, call it and add
+ the return value to the tests.
+ """
+ if module in self._visited:
+ return None
+ self._visited.add(module)
+
+ tests = []
+ tests.append(TestLoader.loadTestsFromModule(self, module))
+
+ if hasattr(module, "additional_tests"):
+ tests.append(module.additional_tests())
+
+ if hasattr(module, '__path__'):
+ for file in resource_listdir(module.__name__, ''):
+ if file.endswith('.py') and file != '__init__.py':
+ submodule = module.__name__ + '.' + file[:-3]
+ else:
+ if resource_exists(module.__name__, file + '/__init__.py'):
+ submodule = module.__name__ + '.' + file
+ else:
+ continue
+ tests.append(self.loadTestsFromName(submodule))
+
+ if len(tests) != 1:
+ return self.suiteClass(tests)
+ else:
+ return tests[0] # don't create a nested suite for only one return
+
+
+# adapted from jaraco.classes.properties:NonDataProperty
+class NonDataProperty(object):
+ def __init__(self, fget):
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ if obj is None:
+ return self
+ return self.fget(obj)
+
+
+class test(Command):
+ """Command to run unit tests after in-place build"""
+
+ description = "run unit tests after in-place build"
+
+ user_options = [
+ ('test-module=', 'm', "Run 'test_suite' in specified module"),
+ ('test-suite=', 's',
+ "Run single test, case or suite (e.g. 'module.test_suite')"),
+ ('test-runner=', 'r', "Test runner to use"),
+ ]
+
+ def initialize_options(self):
+ self.test_suite = None
+ self.test_module = None
+ self.test_loader = None
+ self.test_runner = None
+
+ def finalize_options(self):
+
+ if self.test_suite and self.test_module:
+ msg = "You may specify a module or a suite, but not both"
+ raise DistutilsOptionError(msg)
+
+ if self.test_suite is None:
+ if self.test_module is None:
+ self.test_suite = self.distribution.test_suite
+ else:
+ self.test_suite = self.test_module + ".test_suite"
+
+ if self.test_loader is None:
+ self.test_loader = getattr(self.distribution, 'test_loader', None)
+ if self.test_loader is None:
+ self.test_loader = "setuptools.command.test:ScanningLoader"
+ if self.test_runner is None:
+ self.test_runner = getattr(self.distribution, 'test_runner', None)
+
+ @NonDataProperty
+ def test_args(self):
+ return list(self._test_args())
+
+ def _test_args(self):
+ if not self.test_suite and sys.version_info >= (2, 7):
+ yield 'discover'
+ if self.verbose:
+ yield '--verbose'
+ if self.test_suite:
+ yield self.test_suite
+
+ def with_project_on_sys_path(self, func):
+ """
+ Backward compatibility for project_on_sys_path context.
+ """
+ with self.project_on_sys_path():
+ func()
+
+ @contextlib.contextmanager
+ def project_on_sys_path(self, include_dists=[]):
+ with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
+
+ if with_2to3:
+ # If we run 2to3 we can not do this inplace:
+
+ # Ensure metadata is up-to-date
+ self.reinitialize_command('build_py', inplace=0)
+ self.run_command('build_py')
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = normalize_path(bpy_cmd.build_lib)
+
+ # Build extensions
+ self.reinitialize_command('egg_info', egg_base=build_path)
+ self.run_command('egg_info')
+
+ self.reinitialize_command('build_ext', inplace=0)
+ self.run_command('build_ext')
+ else:
+ # Without 2to3 inplace works fine:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ ei_cmd = self.get_finalized_command("egg_info")
+
+ old_path = sys.path[:]
+ old_modules = sys.modules.copy()
+
+ try:
+ project_path = normalize_path(ei_cmd.egg_base)
+ sys.path.insert(0, project_path)
+ working_set.__init__()
+ add_activation_listener(lambda dist: dist.activate())
+ require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+ with self.paths_on_pythonpath([project_path]):
+ yield
+ finally:
+ sys.path[:] = old_path
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+ working_set.__init__()
+
+ @staticmethod
+ @contextlib.contextmanager
+ def paths_on_pythonpath(paths):
+ """
+ Add the indicated paths to the head of the PYTHONPATH environment
+ variable so that subprocesses will also see the packages at
+ these paths.
+
+ Do this in a context that restores the value on exit.
+ """
+ nothing = object()
+ orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
+ current_pythonpath = os.environ.get('PYTHONPATH', '')
+ try:
+ prefix = os.pathsep.join(paths)
+ to_join = filter(None, [prefix, current_pythonpath])
+ new_path = os.pathsep.join(to_join)
+ if new_path:
+ os.environ['PYTHONPATH'] = new_path
+ yield
+ finally:
+ if orig_pythonpath is nothing:
+ os.environ.pop('PYTHONPATH', None)
+ else:
+ os.environ['PYTHONPATH'] = orig_pythonpath
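+
+ # Usage sketch (illustrative; the path is hypothetical):
+ #
+ #     with test.paths_on_pythonpath(['/tmp/build/lib']):
+ #         ...  # subprocesses now see packages under that path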
+
+ @staticmethod
+ def install_dists(dist):
+ """
+ Install the requirements indicated by the given distribution and
+ return an iterable of the dists that were built.
+ """
+ ir_d = dist.fetch_build_eggs(dist.install_requires)
+ tr_d = dist.fetch_build_eggs(dist.tests_require or [])
+ er_d = dist.fetch_build_eggs(
+ v for k, v in dist.extras_require.items()
+ if k.startswith(':') and evaluate_marker(k[1:])
+ )
+ return itertools.chain(ir_d, tr_d, er_d)
+
+ def run(self):
+ installed_dists = self.install_dists(self.distribution)
+
+ cmd = ' '.join(self._argv)
+ if self.dry_run:
+ self.announce('skipping "%s" (dry run)' % cmd)
+ return
+
+ self.announce('running "%s"' % cmd)
+
+ paths = map(operator.attrgetter('location'), installed_dists)
+ with self.paths_on_pythonpath(paths):
+ with self.project_on_sys_path():
+ self.run_tests()
+
+ def run_tests(self):
+ # Purge modules under test from sys.modules. The test loader will
+ # re-import them from the build location. Required when 2to3 is used
+ # with namespace packages.
+ if six.PY3 and getattr(self.distribution, 'use_2to3', False):
+ module = self.test_suite.split('.')[0]
+ if module in _namespace_packages:
+ del_modules = []
+ if module in sys.modules:
+ del_modules.append(module)
+ module += '.'
+ for name in sys.modules:
+ if name.startswith(module):
+ del_modules.append(name)
+ list(map(sys.modules.__delitem__, del_modules))
+
+ test = unittest.main(
+ None, None, self._argv,
+ testLoader=self._resolve_as_ep(self.test_loader),
+ testRunner=self._resolve_as_ep(self.test_runner),
+ exit=False,
+ )
+ if not test.result.wasSuccessful():
+ msg = 'Test failed: %s' % test.result
+ self.announce(msg, log.ERROR)
+ raise DistutilsError(msg)
+
+ @property
+ def _argv(self):
+ return ['unittest'] + self.test_args
+
+ @staticmethod
+ def _resolve_as_ep(val):
+ """
+ Load the indicated attribute value and call it, as if it had been
+ specified as an entry point.
+ """
+ if val is None:
+ return
+ parsed = EntryPoint.parse("x=" + val)
+ return parsed.resolve()()
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
new file mode 100644
index 0000000..a44173a
--- /dev/null
+++ b/setuptools/command/upload.py
@@ -0,0 +1,42 @@
+import getpass
+from distutils.command import upload as orig
+
+
+class upload(orig.upload):
+ """
+ Override default upload behavior to obtain password
+ in a variety of different ways.
+ """
+
+ def finalize_options(self):
+ orig.upload.finalize_options(self)
+ self.username = (
+ self.username or
+ getpass.getuser()
+ )
+ # Attempt to obtain password. Short circuit evaluation at the first
+ # sign of success.
+ self.password = (
+ self.password or
+ self._load_password_from_keyring() or
+ self._prompt_for_password()
+ )
+
+ def _load_password_from_keyring(self):
+ """
+ Attempt to load password from keyring. Suppress Exceptions.
+ """
+ try:
+ keyring = __import__('keyring')
+ return keyring.get_password(self.repository, self.username)
+ except Exception:
+ pass
+
+ def _prompt_for_password(self):
+ """
+ Prompt for a password on the tty. Suppress Exceptions.
+ """
+ try:
+ return getpass.getpass()
+ except (Exception, KeyboardInterrupt):
+ pass
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
new file mode 100644
index 0000000..07aa564
--- /dev/null
+++ b/setuptools/command/upload_docs.py
@@ -0,0 +1,206 @@
+# -*- coding: utf-8 -*-
+"""upload_docs
+
+Implements a Distutils 'upload_docs' subcommand (upload documentation to
+PyPI's pythonhosted.org).
+"""
+
+from base64 import standard_b64encode
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import socket
+import zipfile
+import tempfile
+import shutil
+import itertools
+import functools
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import http_client, urllib
+
+from pkg_resources import iter_entry_points
+from .upload import upload
+
+
+def _encode(s):
+ errors = 'surrogateescape' if six.PY3 else 'strict'
+ return s.encode('utf-8', errors)
+
+
+class upload_docs(upload):
+ # override the default repository as upload_docs isn't
+ # supported by Warehouse (and won't be).
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
+
+ description = 'Upload documentation to PyPI'
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server'),
+ ('upload-dir=', None, 'directory to upload'),
+ ]
+ boolean_options = upload.boolean_options
+
+ def has_sphinx(self):
+ if self.upload_dir is None:
+ for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
+ return True
+
+ sub_commands = [('build_sphinx', has_sphinx)]
+
+ def initialize_options(self):
+ upload.initialize_options(self)
+ self.upload_dir = None
+ self.target_dir = None
+
+ def finalize_options(self):
+ upload.finalize_options(self)
+ if self.upload_dir is None:
+ if self.has_sphinx():
+ build_sphinx = self.get_finalized_command('build_sphinx')
+ self.target_dir = build_sphinx.builder_target_dir
+ else:
+ build = self.get_finalized_command('build')
+ self.target_dir = os.path.join(build.build_base, 'docs')
+ else:
+ self.ensure_dirname('upload_dir')
+ self.target_dir = self.upload_dir
+ if 'pypi.python.org' in self.repository:
+ log.warn("Upload_docs command is deprecated. Use RTD instead.")
+ self.announce('Using upload directory %s' % self.target_dir)
+
+ def create_zipfile(self, filename):
+ zip_file = zipfile.ZipFile(filename, "w")
+ try:
+ self.mkpath(self.target_dir) # just in case
+ for root, dirs, files in os.walk(self.target_dir):
+ if root == self.target_dir and not files:
+ tmpl = "no files found in upload directory '%s'"
+ raise DistutilsOptionError(tmpl % self.target_dir)
+ for name in files:
+ full = os.path.join(root, name)
+ relative = root[len(self.target_dir):].lstrip(os.path.sep)
+ dest = os.path.join(relative, name)
+ zip_file.write(full, dest)
+ finally:
+ zip_file.close()
+
+ def run(self):
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ tmp_dir = tempfile.mkdtemp()
+ name = self.distribution.metadata.get_name()
+ zip_file = os.path.join(tmp_dir, "%s.zip" % name)
+ try:
+ self.create_zipfile(zip_file)
+ self.upload_file(zip_file)
+ finally:
+ shutil.rmtree(tmp_dir)
+
+ @staticmethod
+ def _build_part(item, sep_boundary):
+ key, values = item
+ title = '\nContent-Disposition: form-data; name="%s"' % key
+ # handle multiple entries for the same name
+ if not isinstance(values, list):
+ values = [values]
+ for value in values:
+ if isinstance(value, tuple):
+ title += '; filename="%s"' % value[0]
+ value = value[1]
+ else:
+ value = _encode(value)
+ yield sep_boundary
+ yield _encode(title)
+ yield b"\n\n"
+ yield value
+ if value and value[-1:] == b'\r':
+ yield b'\n' # write an extra newline (lurve Macs)
+
+ @classmethod
+ def _build_multipart(cls, data):
+ """
+ Build up the MIME payload for the POST data
+ """
+ boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = b'\n--' + boundary
+ end_boundary = sep_boundary + b'--'
+ end_items = end_boundary, b"\n",
+ builder = functools.partial(
+ cls._build_part,
+ sep_boundary=sep_boundary,
+ )
+ part_groups = map(builder, data.items())
+ parts = itertools.chain.from_iterable(part_groups)
+ body_items = itertools.chain(parts, end_items)
+ content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
+ return b''.join(body_items), content_type
+
+ def upload_file(self, filename):
+ with open(filename, 'rb') as f:
+ content = f.read()
+ meta = self.distribution.metadata
+ data = {
+ ':action': 'doc_upload',
+ 'name': meta.get_name(),
+ 'content': (os.path.basename(filename), content),
+ }
+ # set up the authentication
+ credentials = _encode(self.username + ':' + self.password)
+ credentials = standard_b64encode(credentials)
+ if six.PY3:
+ credentials = credentials.decode('ascii')
+ auth = "Basic " + credentials
+
+ body, ct = self._build_multipart(data)
+
+ msg = "Submitting documentation to %s" % (self.repository)
+ self.announce(msg, log.INFO)
+
+ # build the Request
+ # We can't use urllib2 since we need to send the Basic
+ # auth right with the first request
+ schema, netloc, url, params, query, fragments = \
+ urllib.parse.urlparse(self.repository)
+ assert not params and not query and not fragments
+ if schema == 'http':
+ conn = http_client.HTTPConnection(netloc)
+ elif schema == 'https':
+ conn = http_client.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema " + schema)
+
+ data = ''
+ try:
+ conn.connect()
+ conn.putrequest("POST", url)
+ content_type = ct
+ conn.putheader('Content-type', content_type)
+ conn.putheader('Content-length', str(len(body)))
+ conn.putheader('Authorization', auth)
+ conn.endheaders()
+ conn.send(body)
+ except socket.error as e:
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = conn.getresponse()
+ if r.status == 200:
+ msg = 'Server response (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.INFO)
+ elif r.status == 301:
+ location = r.getheader('Location')
+ if location is None:
+ location = 'https://pythonhosted.org/%s/' % meta.get_name()
+ msg = 'Upload successful. Visit %s' % location
+ self.announce(msg, log.INFO)
+ else:
+ msg = 'Upload failed (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.ERROR)
+ if self.show_response:
+ print('-' * 75, r.read(), '-' * 75)
diff --git a/setuptools/config.py b/setuptools/config.py
new file mode 100644
index 0000000..8eddcae
--- /dev/null
+++ b/setuptools/config.py
@@ -0,0 +1,556 @@
+from __future__ import absolute_import, unicode_literals
+import io
+import os
+import sys
+from collections import defaultdict
+from functools import partial
+from importlib import import_module
+
+from distutils.errors import DistutilsOptionError, DistutilsFileError
+from setuptools.extern.six import string_types
+
+
+def read_configuration(
+ filepath, find_others=False, ignore_option_errors=False):
+ """Read given configuration file and returns options from it as a dict.
+
+ :param str|unicode filepath: Path to configuration file
+ to get options from.
+
+ :param bool find_others: Whether to search for other configuration files
+ which could be in various places.
+
+ :param bool ignore_option_errors: Whether to silently ignore options
+ whose values could not be resolved (e.g. due to exceptions in
+ directives such as file:, attr:, etc.).
+ If False, exceptions are propagated as expected.
+
+ :rtype: dict
+ """
+ from setuptools.dist import Distribution, _Distribution
+
+ filepath = os.path.abspath(filepath)
+
+ if not os.path.isfile(filepath):
+ raise DistutilsFileError(
+ 'Configuration file %s does not exist.' % filepath)
+
+ current_directory = os.getcwd()
+ os.chdir(os.path.dirname(filepath))
+
+ try:
+ dist = Distribution()
+
+ filenames = dist.find_config_files() if find_others else []
+ if filepath not in filenames:
+ filenames.append(filepath)
+
+ _Distribution.parse_config_files(dist, filenames=filenames)
+
+ handlers = parse_configuration(
+ dist, dist.command_options,
+ ignore_option_errors=ignore_option_errors)
+
+ finally:
+ os.chdir(current_directory)
+
+ return configuration_to_dict(handlers)
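+
+# Usage sketch (illustrative; the path is hypothetical):
+#
+#     conf = read_configuration('/path/to/setup.cfg')
+#     conf['metadata'].get('version')  # sections keyed by handler prefix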
+
+
+def configuration_to_dict(handlers):
+ """Returns configuration data gathered by given handlers as a dict.
+
+ :param list[ConfigHandler] handlers: Handlers list,
+ usually from parse_configuration()
+
+ :rtype: dict
+ """
+ config_dict = defaultdict(dict)
+
+ for handler in handlers:
+
+ obj_alias = handler.section_prefix
+ target_obj = handler.target_obj
+
+ for option in handler.set_options:
+ getter = getattr(target_obj, 'get_%s' % option, None)
+
+ if getter is None:
+ value = getattr(target_obj, option)
+
+ else:
+ value = getter()
+
+ config_dict[obj_alias][option] = value
+
+ return config_dict
+
+
+def parse_configuration(
+ distribution, command_options, ignore_option_errors=False):
+ """Performs additional parsing of configuration options
+ for a distribution.
+
+ Returns the option handlers used (a metadata handler and an
+ options handler).
+
+ :param Distribution distribution:
+ :param dict command_options:
+ :param bool ignore_option_errors: Whether to silently ignore options
+ whose values could not be resolved (e.g. due to exceptions in
+ directives such as file:, attr:, etc.).
+ If False, exceptions are propagated as expected.
+ :rtype: tuple
+ """
+ meta = ConfigMetadataHandler(
+ distribution.metadata, command_options, ignore_option_errors)
+ meta.parse()
+
+ options = ConfigOptionsHandler(
+ distribution, command_options, ignore_option_errors)
+ options.parse()
+
+ return meta, options
+
+
+class ConfigHandler(object):
+ """Handles metadata supplied in configuration files."""
+
+ section_prefix = None
+ """Prefix for config sections handled by this handler.
+ Must be provided by subclasses.
+
+ """
+
+ aliases = {}
+ """Options aliases.
+ For compatibility with various packages. E.g.: d2to1 and pbr.
+ Note: `-` in keys is replaced with `_` by config parser.
+
+ """
+
+ def __init__(self, target_obj, options, ignore_option_errors=False):
+ sections = {}
+
+ section_prefix = self.section_prefix
+ for section_name, section_options in options.items():
+ if not section_name.startswith(section_prefix):
+ continue
+
+ section_name = section_name.replace(section_prefix, '').strip('.')
+ sections[section_name] = section_options
+
+ self.ignore_option_errors = ignore_option_errors
+ self.target_obj = target_obj
+ self.sections = sections
+ self.set_options = []
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ raise NotImplementedError(
+ '%s must provide .parsers property' % self.__class__.__name__)
+
+ def __setitem__(self, option_name, value):
+ unknown = tuple()
+ target_obj = self.target_obj
+
+ # Translate alias into real name.
+ option_name = self.aliases.get(option_name, option_name)
+
+ current_value = getattr(target_obj, option_name, unknown)
+
+ if current_value is unknown:
+ raise KeyError(option_name)
+
+ if current_value:
+ # Already set (non-empty); skip.
+ return
+
+ skip_option = False
+ parser = self.parsers.get(option_name)
+ if parser:
+ try:
+ value = parser(value)
+
+ except Exception:
+ skip_option = True
+ if not self.ignore_option_errors:
+ raise
+
+ if skip_option:
+ return
+
+ setter = getattr(target_obj, 'set_%s' % option_name, None)
+ if setter is None:
+ setattr(target_obj, option_name, value)
+ else:
+ setter(value)
+
+ self.set_options.append(option_name)
+
+ @classmethod
+ def _parse_list(cls, value, separator=','):
+ """Represents value as a list.
+
+ Value is split either by separator (defaults to comma) or by lines.
+
+ :param value:
+ :param separator: List items separator character.
+ :rtype: list
+ """
+ if isinstance(value, list): # _get_parser_compound case
+ return value
+
+ if '\n' in value:
+ value = value.splitlines()
+ else:
+ value = value.split(separator)
+
+ return [chunk.strip() for chunk in value if chunk.strip()]
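+
+ # Example (illustrative): both 'a, b' and 'a\nb' parse to ['a', 'b'].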
+
+ @classmethod
+ def _parse_dict(cls, value):
+ """Represents value as a dict.
+
+ :param value:
+ :rtype: dict
+ """
+ separator = '='
+ result = {}
+ for line in cls._parse_list(value):
+ key, sep, val = line.partition(separator)
+ if sep != separator:
+ raise DistutilsOptionError(
+ 'Unable to parse option value to dict: %s' % value)
+ result[key.strip()] = val.strip()
+
+ return result
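+
+ # Example (illustrative): 'a=1\nb = 2' parses to {'a': '1', 'b': '2'}.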
+
+ @classmethod
+ def _parse_bool(cls, value):
+ """Represents value as boolean.
+
+ :param value:
+ :rtype: bool
+ """
+ value = value.lower()
+ return value in ('1', 'true', 'yes')
+
+ @classmethod
+ def _parse_file(cls, value):
+ """Represents value as a string, allowing including text
+ from nearest files using `file:` directive.
+
+ Directive is sandboxed and won't reach anything outside
+ directory with setup.py.
+
+ Examples:
+ file: LICENSE
+ file: README.rst, CHANGELOG.md, src/file.txt
+
+ :param str value:
+ :rtype: str
+ """
+ include_directive = 'file:'
+
+ if not isinstance(value, string_types):
+ return value
+
+ if not value.startswith(include_directive):
+ return value
+
+ spec = value[len(include_directive):]
+ filepaths = (os.path.abspath(path.strip()) for path in spec.split(','))
+ return '\n'.join(
+ cls._read_file(path)
+ for path in filepaths
+ if (cls._assert_local(path) or True)
+ and os.path.isfile(path)
+ )
+
+ @staticmethod
+ def _assert_local(filepath):
+ if not filepath.startswith(os.getcwd()):
+ raise DistutilsOptionError(
+ '`file:` directive can not access %s' % filepath)
+
+ @staticmethod
+ def _read_file(filepath):
+ with io.open(filepath, encoding='utf-8') as f:
+ return f.read()
+
+ @classmethod
+ def _parse_attr(cls, value):
+ """Represents value as a module attribute.
+
+ Examples:
+ attr: package.attr
+ attr: package.module.attr
+
+ :param str value:
+ :rtype: str
+ """
+ attr_directive = 'attr:'
+ if not value.startswith(attr_directive):
+ return value
+
+ attrs_path = value.replace(attr_directive, '').strip().split('.')
+ attr_name = attrs_path.pop()
+
+ module_name = '.'.join(attrs_path)
+ module_name = module_name or '__init__'
+
+ sys.path.insert(0, os.getcwd())
+ try:
+ module = import_module(module_name)
+ value = getattr(module, attr_name)
+
+ finally:
+ sys.path = sys.path[1:]
+
+ return value
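+
+ # Example (illustrative): 'attr: mypkg.__version__' (package name
+ # hypothetical) imports 'mypkg' and returns its '__version__' value.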
+
+ @classmethod
+ def _get_parser_compound(cls, *parse_methods):
+ """Returns parser function to represents value as a list.
+
+ Parses a value applying given methods one after another.
+
+ :param parse_methods:
+ :rtype: callable
+ """
+ def parse(value):
+ parsed = value
+
+ for method in parse_methods:
+ parsed = method(parsed)
+
+ return parsed
+
+ return parse
+
+ @classmethod
+ def _parse_section_to_dict(cls, section_options, values_parser=None):
+ """Parses section options into a dictionary.
+
+ Optionally applies a given parser to values.
+
+ :param dict section_options:
+ :param callable values_parser:
+ :rtype: dict
+ """
+ value = {}
+ values_parser = values_parser or (lambda val: val)
+ for key, (_, val) in section_options.items():
+ value[key] = values_parser(val)
+ return value
+
+ def parse_section(self, section_options):
+ """Parses configuration file section.
+
+ :param dict section_options:
+ """
+ for (name, (_, value)) in section_options.items():
+ try:
+ self[name] = value
+
+ except KeyError:
+ pass # Keep silent: a new option may appear at any time.
+
+ def parse(self):
+ """Parses configuration file items from one
+ or more related sections.
+
+ """
+ for section_name, section_options in self.sections.items():
+
+ method_postfix = ''
+ if section_name: # [section.option] variant
+ method_postfix = '_%s' % section_name
+
+ section_parser_method = getattr(
+ self,
+ # Dots in section names are translated into double underscores.
+ ('parse_section%s' % method_postfix).replace('.', '__'),
+ None)
+
+ if section_parser_method is None:
+ raise DistutilsOptionError(
+ 'Unsupported distribution option section: [%s.%s]' % (
+ self.section_prefix, section_name))
+
+ section_parser_method(section_options)
+
+
+class ConfigMetadataHandler(ConfigHandler):
+
+ section_prefix = 'metadata'
+
+ aliases = {
+ 'home_page': 'url',
+ 'summary': 'description',
+ 'classifier': 'classifiers',
+ 'platform': 'platforms',
+ }
+
+ strict_mode = False
+ """We need to keep it loose, to be partially compatible with
+ `pbr` and `d2to1` packages which also uses `metadata` section.
+
+ """
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ parse_list = self._parse_list
+ parse_file = self._parse_file
+ parse_dict = self._parse_dict
+
+ return {
+ 'platforms': parse_list,
+ 'keywords': parse_list,
+ 'provides': parse_list,
+ 'requires': parse_list,
+ 'obsoletes': parse_list,
+ 'classifiers': self._get_parser_compound(parse_file, parse_list),
+ 'license': parse_file,
+ 'description': parse_file,
+ 'long_description': parse_file,
+ 'version': self._parse_version,
+ 'project_urls': parse_dict,
+ }
+
+ def _parse_version(self, value):
+ """Parses `version` option value.
+
+ :param value:
+ :rtype: str
+
+ """
+ version = self._parse_attr(value)
+
+ if callable(version):
+ version = version()
+
+ if not isinstance(version, string_types):
+ if hasattr(version, '__iter__'):
+ version = '.'.join(map(str, version))
+ else:
+ version = '%s' % version
+
+ return version
+
+
+class ConfigOptionsHandler(ConfigHandler):
+
+ section_prefix = 'options'
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ parse_list = self._parse_list
+ parse_list_semicolon = partial(self._parse_list, separator=';')
+ parse_bool = self._parse_bool
+ parse_dict = self._parse_dict
+
+ return {
+ 'zip_safe': parse_bool,
+ 'use_2to3': parse_bool,
+ 'include_package_data': parse_bool,
+ 'package_dir': parse_dict,
+ 'use_2to3_fixers': parse_list,
+ 'use_2to3_exclude_fixers': parse_list,
+ 'convert_2to3_doctests': parse_list,
+ 'scripts': parse_list,
+ 'eager_resources': parse_list,
+ 'dependency_links': parse_list,
+ 'namespace_packages': parse_list,
+ 'install_requires': parse_list_semicolon,
+ 'setup_requires': parse_list_semicolon,
+ 'tests_require': parse_list_semicolon,
+ 'packages': self._parse_packages,
+ 'entry_points': self._parse_file,
+ 'py_modules': parse_list,
+ }
+
+ def _parse_packages(self, value):
+ """Parses `packages` option value.
+
+ :param value:
+ :rtype: list
+ """
+ find_directive = 'find:'
+
+ if not value.startswith(find_directive):
+ return self._parse_list(value)
+
+ # Read function arguments from a dedicated section.
+ find_kwargs = self.parse_section_packages__find(
+ self.sections.get('packages.find', {}))
+
+ from setuptools import find_packages
+
+ return find_packages(**find_kwargs)
+
+ def parse_section_packages__find(self, section_options):
+ """Parses `packages.find` configuration file section.
+
+ To be used in conjunction with _parse_packages().
+
+ :param dict section_options:
+ """
+ section_data = self._parse_section_to_dict(
+ section_options, self._parse_list)
+
+ valid_keys = ['where', 'include', 'exclude']
+
+ find_kwargs = dict(
+ [(k, v) for k, v in section_data.items() if k in valid_keys and v])
+
+ where = find_kwargs.get('where')
+ if where is not None:
+ find_kwargs['where'] = where[0] # cast list to single val
+
+ return find_kwargs
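+
+ # setup.cfg sketch (illustrative) exercising the 'find:' directive:
+ #
+ #     [options]
+ #     packages = find:
+ #
+ #     [options.packages.find]
+ #     where = src
+ #     exclude = tests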
+
+ def parse_section_entry_points(self, section_options):
+ """Parses `entry_points` configuration file section.
+
+ :param dict section_options:
+ """
+ parsed = self._parse_section_to_dict(section_options, self._parse_list)
+ self['entry_points'] = parsed
+
+ def _parse_package_data(self, section_options):
+ parsed = self._parse_section_to_dict(section_options, self._parse_list)
+
+ root = parsed.get('*')
+ if root:
+ parsed[''] = root
+ del parsed['*']
+
+ return parsed
+
+ def parse_section_package_data(self, section_options):
+ """Parses `package_data` configuration file section.
+
+ :param dict section_options:
+ """
+ self['package_data'] = self._parse_package_data(section_options)
+
+ def parse_section_exclude_package_data(self, section_options):
+ """Parses `exclude_package_data` configuration file section.
+
+ :param dict section_options:
+ """
+ self['exclude_package_data'] = self._parse_package_data(
+ section_options)
+
+ def parse_section_extras_require(self, section_options):
+ """Parses `extras_require` configuration file section.
+
+ :param dict section_options:
+ """
+ parse_list = partial(self._parse_list, separator=';')
+ self['extras_require'] = self._parse_section_to_dict(
+ section_options, parse_list)
diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py
new file mode 100644
index 0000000..2931c13
--- /dev/null
+++ b/setuptools/dep_util.py
@@ -0,0 +1,23 @@
+from distutils.dep_util import newer_group
+
+# Yes, this was almost entirely copy-pasted from 'newer_pairwise()';
+# it is just another convenience function.
+def newer_pairwise_group(sources_groups, targets):
+ """Walk both arguments in parallel, testing if each source group is newer
+ than its corresponding target. Returns a pair of lists (sources_groups,
+ targets) holding only the pairs where the source group is newer than
+ its target, according to the semantics of 'newer_group()'.
+ """
+ if len(sources_groups) != len(targets):
+ raise ValueError("'sources_group' and 'targets' must be the same length")
+
+ # build a pair of lists (sources_groups, targets) where source is newer
+ n_sources = []
+ n_targets = []
+ for i in range(len(sources_groups)):
+ if newer_group(sources_groups[i], targets[i]):
+ n_sources.append(sources_groups[i])
+ n_targets.append(targets[i])
+
+ return n_sources, n_targets
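+
+# Example (illustrative, assuming the named files exist on disk):
+#
+#     newer_pairwise_group([['a.c', 'b.c'], ['x.c']], ['ab.o', 'x.o'])
+#
+# returns the filtered (sources_groups, targets) pair, keeping only the
+# positions where 'newer_group()' reports the group as newer than its
+# target.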
diff --git a/setuptools/depends.py b/setuptools/depends.py
new file mode 100644
index 0000000..45e7052
--- /dev/null
+++ b/setuptools/depends.py
@@ -0,0 +1,186 @@
+import sys
+import imp
+import marshal
+from distutils.version import StrictVersion
+from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
+
+from .py33compat import Bytecode
+
+
+__all__ = [
+ 'Require', 'find_module', 'get_module_constant', 'extract_constant'
+]
+
+
+class Require:
+ """A prerequisite to building or installing a distribution"""
+
+ def __init__(self, name, requested_version, module, homepage='',
+ attribute=None, format=None):
+
+ if format is None and requested_version is not None:
+ format = StrictVersion
+
+ if format is not None:
+ requested_version = format(requested_version)
+ if attribute is None:
+ attribute = '__version__'
+
+ self.__dict__.update(locals())
+ del self.self
+
+ def full_name(self):
+ """Return full package/distribution name, w/version"""
+ if self.requested_version is not None:
+ return '%s-%s' % (self.name, self.requested_version)
+ return self.name
+
+ def version_ok(self, version):
+ """Is 'version' sufficiently up-to-date?"""
+ return self.attribute is None or self.format is None or \
+ str(version) != "unknown" and version >= self.requested_version
+
+ def get_version(self, paths=None, default="unknown"):
+ """Get version number of installed module, 'None', or 'default'
+
+ Search 'paths' for module. If not found, return 'None'. If found,
+ return the extracted version attribute, or 'default' if no version
+ attribute was specified, or the value cannot be determined without
+ importing the module. The version is formatted according to the
+ requirement's version format (if any), unless it is 'None' or the
+ supplied 'default'.
+ """
+
+ if self.attribute is None:
+ try:
+ f, p, i = find_module(self.module, paths)
+ if f:
+ f.close()
+ return default
+ except ImportError:
+ return None
+
+ v = get_module_constant(self.module, self.attribute, default, paths)
+
+ if v is not None and v is not default and self.format is not None:
+ return self.format(v)
+
+ return v
+
+ def is_present(self, paths=None):
+ """Return true if dependency is present on 'paths'"""
+ return self.get_version(paths) is not None
+
+ def is_current(self, paths=None):
+ """Return true if dependency is present and up-to-date on 'paths'"""
+ version = self.get_version(paths)
+ if version is None:
+ return False
+ return self.version_ok(version)
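+
+# Example (illustrative):
+#
+#     req = Require('Distutils', '1.0.3', 'distutils')
+#     req.full_name()    # 'Distutils-1.0.3'
+#     req.is_present()   # True if the distutils module can be found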
+
+
+def find_module(module, paths=None):
+ """Just like 'imp.find_module()', but with package support"""
+
+ parts = module.split('.')
+
+ while parts:
+ part = parts.pop(0)
+ f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
+
+ if kind == PKG_DIRECTORY:
+ parts = parts or ['__init__']
+ paths = [path]
+
+ elif parts:
+ raise ImportError("Can't find %r in %s" % (parts, module))
+
+ return info
+
+
+def get_module_constant(module, symbol, default=-1, paths=None):
+ """Find 'module' by searching 'paths', and extract 'symbol'
+
+ Return 'None' if 'module' does not exist on 'paths', or it does not define
+ 'symbol'. If the module defines 'symbol' as a constant, return the
+ constant. Otherwise, return 'default'."""
+
+ try:
+ f, path, (suffix, mode, kind) = find_module(module, paths)
+ except ImportError:
+ # Module doesn't exist
+ return None
+
+ try:
+ if kind == PY_COMPILED:
+ f.read(8) # skip magic & date
+ code = marshal.load(f)
+ elif kind == PY_FROZEN:
+ code = imp.get_frozen_object(module)
+ elif kind == PY_SOURCE:
+ code = compile(f.read(), path, 'exec')
+ else:
+ # Not something we can parse; we'll have to import it. :(
+ if module not in sys.modules:
+ imp.load_module(module, f, path, (suffix, mode, kind))
+ return getattr(sys.modules[module], symbol, None)
+
+ finally:
+ if f:
+ f.close()
+
+ return extract_constant(code, symbol, default)
+
+
+def extract_constant(code, symbol, default=-1):
+ """Extract the constant value of 'symbol' from 'code'
+
+ If the name 'symbol' is bound to a constant value by the Python code
+ object 'code', return that value. If 'symbol' is bound to an expression,
+ return 'default'. Otherwise, return 'None'.
+
+ Return value is based on the first assignment to 'symbol'. 'symbol' must
+ be a global, or at least a non-"fast" local in the code block. That is,
+ only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+ must be present in 'code.co_names'.
+ """
+ if symbol not in code.co_names:
+ # name's not there, can't possibly be an assignment
+ return None
+
+ name_idx = list(code.co_names).index(symbol)
+
+ STORE_NAME = 90
+ STORE_GLOBAL = 97
+ LOAD_CONST = 100
+
+ const = default
+
+ for byte_code in Bytecode(code):
+ op = byte_code.opcode
+ arg = byte_code.arg
+
+ if op == LOAD_CONST:
+ const = code.co_consts[arg]
+ elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
+ return const
+ else:
+ const = default
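+
+# Example (illustrative):
+#
+#     code = compile("__version__ = '1.0'", '<string>', 'exec')
+#     extract_constant(code, '__version__')  # -> '1.0'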
+
+
+def _update_globals():
+ """
+ Patch the globals to remove the objects not available on some platforms.
+
+ XXX it'd be better to test assertions about bytecode instead.
+ """
+
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ return
+ incompatible = 'extract_constant', 'get_module_constant'
+ for name in incompatible:
+ del globals()[name]
+ __all__.remove(name)
+
+
+_update_globals()
diff --git a/setuptools/dist.py b/setuptools/dist.py
new file mode 100644
index 0000000..321ab6b
--- /dev/null
+++ b/setuptools/dist.py
@@ -0,0 +1,1061 @@
+# -*- coding: utf-8 -*-
+__all__ = ['Distribution']
+
+import re
+import os
+import warnings
+import numbers
+import distutils.log
+import distutils.core
+import distutils.cmd
+import distutils.dist
+import itertools
+from collections import defaultdict
+from distutils.errors import (
+ DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError,
+)
+from distutils.util import rfc822_escape
+from distutils.version import StrictVersion
+
+from setuptools.extern import six
+from setuptools.extern import packaging
+from setuptools.extern.six.moves import map, filter, filterfalse
+
+from setuptools.depends import Require
+from setuptools import windows_support
+from setuptools.monkey import get_unpatched
+from setuptools.config import parse_configuration
+import pkg_resources
+from .py36compat import Distribution_parse_config_files
+
+__import__('setuptools.extern.packaging.specifiers')
+__import__('setuptools.extern.packaging.version')
+
+
+def _get_unpatched(cls):
+ warnings.warn("Do not call this function", DeprecationWarning)
+ return get_unpatched(cls)
+
+
+def get_metadata_version(dist_md):
+ if dist_md.long_description_content_type or dist_md.provides_extras:
+ return StrictVersion('2.1')
+ elif (dist_md.maintainer is not None or
+ dist_md.maintainer_email is not None or
+ getattr(dist_md, 'python_requires', None) is not None):
+ return StrictVersion('1.2')
+ elif (dist_md.provides or dist_md.requires or dist_md.obsoletes or
+ dist_md.classifiers or dist_md.download_url):
+ return StrictVersion('1.1')
+
+ return StrictVersion('1.0')
+
+
+# Based on Python 3.5 version
+def write_pkg_file(self, file):
+ """Write the PKG-INFO format data to a file object.
+ """
+ version = get_metadata_version(self)
+
+ file.write('Metadata-Version: %s\n' % version)
+ file.write('Name: %s\n' % self.get_name())
+ file.write('Version: %s\n' % self.get_version())
+ file.write('Summary: %s\n' % self.get_description())
+ file.write('Home-page: %s\n' % self.get_url())
+
+ if version < StrictVersion('1.2'):
+ file.write('Author: %s\n' % self.get_contact())
+ file.write('Author-email: %s\n' % self.get_contact_email())
+ else:
+ optional_fields = (
+ ('Author', 'author'),
+ ('Author-email', 'author_email'),
+ ('Maintainer', 'maintainer'),
+ ('Maintainer-email', 'maintainer_email'),
+ )
+
+ for field, attr in optional_fields:
+ attr_val = getattr(self, attr)
+ if six.PY2:
+ attr_val = self._encode_field(attr_val)
+
+ if attr_val is not None:
+ file.write('%s: %s\n' % (field, attr_val))
+
+ file.write('License: %s\n' % self.get_license())
+ if self.download_url:
+ file.write('Download-URL: %s\n' % self.download_url)
+ for project_url in self.project_urls.items():
+ file.write('Project-URL: %s, %s\n' % project_url)
+
+ long_desc = rfc822_escape(self.get_long_description())
+ file.write('Description: %s\n' % long_desc)
+
+ keywords = ','.join(self.get_keywords())
+ if keywords:
+ file.write('Keywords: %s\n' % keywords)
+
+ if version >= StrictVersion('1.2'):
+ for platform in self.get_platforms():
+ file.write('Platform: %s\n' % platform)
+ else:
+ self._write_list(file, 'Platform', self.get_platforms())
+
+ self._write_list(file, 'Classifier', self.get_classifiers())
+
+ # PEP 314
+ self._write_list(file, 'Requires', self.get_requires())
+ self._write_list(file, 'Provides', self.get_provides())
+ self._write_list(file, 'Obsoletes', self.get_obsoletes())
+
+ # Setuptools specific for PEP 345
+ if hasattr(self, 'python_requires'):
+ file.write('Requires-Python: %s\n' % self.python_requires)
+
+ # PEP 566
+ if self.long_description_content_type:
+ file.write(
+ 'Description-Content-Type: %s\n' %
+ self.long_description_content_type
+ )
+ if self.provides_extras:
+ for extra in self.provides_extras:
+ file.write('Provides-Extra: %s\n' % extra)
+
+
+sequence = tuple, list
+
+
+def check_importable(dist, attr, value):
+ try:
+ ep = pkg_resources.EntryPoint.parse('x=' + value)
+ assert not ep.extras
+ except (TypeError, ValueError, AttributeError, AssertionError):
+ raise DistutilsSetupError(
+ "%r must be importable 'module:attrs' string (got %r)"
+ % (attr, value)
+ )
+
+
+def assert_string_list(dist, attr, value):
+ """Verify that value is a string list or None"""
+ try:
+ assert ''.join(value) != value
+ except (TypeError, ValueError, AttributeError, AssertionError):
+ raise DistutilsSetupError(
+ "%r must be a list of strings (got %r)" % (attr, value)
+ )
+
+
+def check_nsp(dist, attr, value):
+ """Verify that namespace packages are valid"""
+ ns_packages = value
+ assert_string_list(dist, attr, ns_packages)
+ for nsp in ns_packages:
+ if not dist.has_contents_for(nsp):
+ raise DistutilsSetupError(
+ "Distribution contains no modules or packages for " +
+ "namespace package %r" % nsp
+ )
+ parent, sep, child = nsp.rpartition('.')
+ if parent and parent not in ns_packages:
+ distutils.log.warn(
+ "WARNING: %r is declared as a package namespace, but %r"
+ " is not: please correct this in setup.py", nsp, parent
+ )
+
+
+def check_extras(dist, attr, value):
+ """Verify that extras_require mapping is valid"""
+ try:
+ list(itertools.starmap(_check_extra, value.items()))
+ except (TypeError, ValueError, AttributeError):
+ raise DistutilsSetupError(
+ "'extras_require' must be a dictionary whose values are "
+ "strings or lists of strings containing valid project/version "
+ "requirement specifiers."
+ )
+
+
+def _check_extra(extra, reqs):
+ name, sep, marker = extra.partition(':')
+ if marker and pkg_resources.invalid_marker(marker):
+ raise DistutilsSetupError("Invalid environment marker: " + marker)
+ list(pkg_resources.parse_requirements(reqs))
+
+
+def assert_bool(dist, attr, value):
+ """Verify that value is True, False, 0, or 1"""
+ if bool(value) != value:
+ tmpl = "{attr!r} must be a boolean value (got {value!r})"
+ raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
+
+
+def check_requirements(dist, attr, value):
+ """Verify that install_requires is a valid requirements list"""
+ try:
+ list(pkg_resources.parse_requirements(value))
+ if isinstance(value, (dict, set)):
+ raise TypeError("Unordered types are not allowed")
+ except (TypeError, ValueError) as error:
+ tmpl = (
+ "{attr!r} must be a string or list of strings "
+ "containing valid project/version requirement specifiers; {error}"
+ )
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+
+
+def check_specifier(dist, attr, value):
+ """Verify that value is a valid version specifier"""
+ try:
+ packaging.specifiers.SpecifierSet(value)
+ except packaging.specifiers.InvalidSpecifier as error:
+ tmpl = (
+ "{attr!r} must be a string "
+ "containing valid version specifiers; {error}"
+ )
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+
+
+def check_entry_points(dist, attr, value):
+ """Verify that entry_points map is parseable"""
+ try:
+ pkg_resources.EntryPoint.parse_map(value)
+ except ValueError as e:
+ raise DistutilsSetupError(e)
+
+
+def check_test_suite(dist, attr, value):
+ if not isinstance(value, six.string_types):
+ raise DistutilsSetupError("test_suite must be a string")
+
+
+def check_package_data(dist, attr, value):
+ """Verify that value is a dictionary of package names to glob lists"""
+ if isinstance(value, dict):
+ for k, v in value.items():
+ if not isinstance(k, str):
+ break
+ try:
+ iter(v)
+ except TypeError:
+ break
+ else:
+ return
+ raise DistutilsSetupError(
+ attr + " must be a dictionary mapping package names to lists of "
+ "wildcard patterns"
+ )
+
+
+def check_packages(dist, attr, value):
+ for pkgname in value:
+ if not re.match(r'\w+(\.\w+)*', pkgname):
+ distutils.log.warn(
+ "WARNING: %r not a valid package name; please use only "
+ ".-separated package names in setup.py", pkgname
+ )
+
+
+_Distribution = get_unpatched(distutils.core.Distribution)
+
+
+class Distribution(Distribution_parse_config_files, _Distribution):
+ """Distribution with support for features, tests, and package data
+
+ This is an enhanced version of 'distutils.dist.Distribution' that
+ effectively adds the following new optional keyword arguments to 'setup()':
+
+ 'install_requires' -- a string or sequence of strings specifying project
+ versions that the distribution requires when installed, in the format
+ used by 'pkg_resources.require()'. They will be installed
+ automatically when the package is installed. If you wish to use
+ packages that are not available in PyPI, or want to give your users an
+ alternate download location, you can add a 'find_links' option to the
+ '[easy_install]' section of your project's 'setup.cfg' file, and then
+ setuptools will scan the listed web pages for links that satisfy the
+ requirements.
+
+ 'extras_require' -- a dictionary mapping names of optional "extras" to the
+ additional requirement(s) that using those extras incurs. For example,
+ this::
+
+ extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
+
+ indicates that the distribution can optionally provide an extra
+ capability called "reST", but it can only be used if docutils and
+ reSTedit are installed. If the user installs your package using
+ EasyInstall and requests one of your extras, the corresponding
+ additional requirements will be installed if needed.
+
+ 'features' **deprecated** -- a dictionary mapping option names to
+ 'setuptools.Feature' objects. Features are a portion of the
+ distribution that can be
+ included or excluded based on user options, inter-feature dependencies,
+ and availability on the current system. Excluded features are omitted
+ from all setup commands, including source and binary distributions, so
+ you can create multiple distributions from the same source tree.
+ Feature names should be valid Python identifiers, except that they may
+ contain the '-' (minus) sign. Features can be included or excluded
+ via the command line options '--with-X' and '--without-X', where 'X' is
+ the name of the feature. Whether a feature is included by default, and
+ whether you are allowed to control this from the command line, is
+ determined by the Feature object. See the 'Feature' class for more
+ information.
+
+ 'test_suite' -- the name of a test suite to run for the 'test' command.
+ If the user runs 'python setup.py test', the package will be installed,
+ and the named test suite will be run. The format is the same as
+ would be used on a 'unittest.py' command line. That is, it is the
+ dotted name of an object to import and call to generate a test suite.
+
+ 'package_data' -- a dictionary mapping package names to lists of filenames
+ or globs to use to find data files contained in the named packages.
+ If the dictionary has filenames or globs listed under '""' (the empty
+ string), those names will be searched for in every package, in addition
+ to any names for the specific package. Data files found using these
+ names/globs will be installed along with the package, in the same
+ location as the package. Note that globs are allowed to reference
+ the contents of non-package subdirectories, as long as you use '/' as
+ a path separator. (Globs are automatically converted to
+ platform-specific paths at runtime.)
+
+ In addition to these new keywords, this class also has several new methods
+ for manipulating the distribution's contents. For example, the 'include()'
+ and 'exclude()' methods can be thought of as in-place add and subtract
+ commands that add or remove packages, modules, extensions, and so on from
+ the distribution. They are used by the feature subsystem to configure the
+ distribution for the included and excluded features.
+ """
+
+ _patched_dist = None
+
+ def patch_missing_pkg_info(self, attrs):
+ # Fake up a replacement for the data that would normally come from
+ # PKG-INFO, but which might not yet be built if this is a fresh
+ # checkout.
+ #
+ if not attrs or 'name' not in attrs or 'version' not in attrs:
+ return
+ key = pkg_resources.safe_name(str(attrs['name'])).lower()
+ dist = pkg_resources.working_set.by_key.get(key)
+ if dist is not None and not dist.has_metadata('PKG-INFO'):
+ dist._version = pkg_resources.safe_version(str(attrs['version']))
+ self._patched_dist = dist
+
+ def __init__(self, attrs=None):
+ have_package_data = hasattr(self, "package_data")
+ if not have_package_data:
+ self.package_data = {}
+ attrs = attrs or {}
+ if 'features' in attrs or 'require_features' in attrs:
+ Feature.warn_deprecated()
+ self.require_features = []
+ self.features = {}
+ self.dist_files = []
+ self.src_root = attrs.pop("src_root", None)
+ self.patch_missing_pkg_info(attrs)
+ self.project_urls = attrs.get('project_urls', {})
+ self.dependency_links = attrs.pop('dependency_links', [])
+ self.setup_requires = attrs.pop('setup_requires', [])
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ vars(self).setdefault(ep.name, None)
+ _Distribution.__init__(self, attrs)
+
+ # The project_urls attribute may not be supported in distutils, so
+ # prime it here from our value if not automatically set
+ self.metadata.project_urls = getattr(
+ self.metadata, 'project_urls', self.project_urls)
+ self.metadata.long_description_content_type = attrs.get(
+ 'long_description_content_type'
+ )
+ self.metadata.provides_extras = getattr(
+ self.metadata, 'provides_extras', set()
+ )
+
+ if isinstance(self.metadata.version, numbers.Number):
+ # Some people apparently take "version number" too literally :)
+ self.metadata.version = str(self.metadata.version)
+
+ if self.metadata.version is not None:
+ try:
+ ver = packaging.version.Version(self.metadata.version)
+ normalized_version = str(ver)
+ if self.metadata.version != normalized_version:
+ warnings.warn(
+ "Normalizing '%s' to '%s'" % (
+ self.metadata.version,
+ normalized_version,
+ )
+ )
+ self.metadata.version = normalized_version
+ except (packaging.version.InvalidVersion, TypeError):
+ warnings.warn(
+ "The version specified (%r) is an invalid version, this "
+ "may not work as expected with newer versions of "
+ "setuptools, pip, and PyPI. Please see PEP 440 for more "
+ "details." % self.metadata.version
+ )
+ self._finalize_requires()
+
+ def _finalize_requires(self):
+ """
+ Set `metadata.python_requires` and fix environment markers
+ in `install_requires` and `extras_require`.
+ """
+ if getattr(self, 'python_requires', None):
+ self.metadata.python_requires = self.python_requires
+
+ if getattr(self, 'extras_require', None):
+ for extra in self.extras_require.keys():
+ # Since this gets called multiple times at points where the
+ # keys have become 'converted' extras, ensure that we are only
+ # truly adding extras we haven't seen before here.
+ extra = extra.split(':')[0]
+ if extra:
+ self.metadata.provides_extras.add(extra)
+
+ self._convert_extras_requirements()
+ self._move_install_requirements_markers()
+
+ def _convert_extras_requirements(self):
+ """
+ Convert requirements in `extras_require` of the form
+ `"extra": ["barbazquux; {marker}"]` to
+ `"extra:{marker}": ["barbazquux"]`.
+ """
+ spec_ext_reqs = getattr(self, 'extras_require', None) or {}
+ self._tmp_extras_require = defaultdict(list)
+ for section, v in spec_ext_reqs.items():
+ # Do not strip empty sections.
+ self._tmp_extras_require[section]
+ for r in pkg_resources.parse_requirements(v):
+ suffix = self._suffix_for(r)
+ self._tmp_extras_require[section + suffix].append(r)
+
+ @staticmethod
+ def _suffix_for(req):
+ """
+ For a requirement, return the 'extras_require' suffix for
+ that requirement.
+ """
+ return ':' + str(req.marker) if req.marker else ''
+
+ def _move_install_requirements_markers(self):
+ """
+ Move requirements in `install_requires` that are using environment
+ markers into `extras_require`.
+ """
+
+ # divide the install_requires into two sets, simple ones still
+ # handled by install_requires and more complex ones handled
+ # by extras_require.
+
+ def is_simple_req(req):
+ return not req.marker
+
+ spec_inst_reqs = getattr(self, 'install_requires', None) or ()
+ inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs))
+ simple_reqs = filter(is_simple_req, inst_reqs)
+ complex_reqs = filterfalse(is_simple_req, inst_reqs)
+ self.install_requires = list(map(str, simple_reqs))
+
+ for r in complex_reqs:
+ self._tmp_extras_require[':' + str(r.marker)].append(r)
+ self.extras_require = dict(
+ (k, [str(r) for r in map(self._clean_req, v)])
+ for k, v in self._tmp_extras_require.items()
+ )
+
+ def _clean_req(self, req):
+ """
+ Given a Requirement, remove environment markers and return it.
+ """
+ req.marker = None
+ return req
+
+ def parse_config_files(self, filenames=None, ignore_option_errors=False):
+ """Parses configuration files from various levels
+ and loads configuration.
+
+ """
+ _Distribution.parse_config_files(self, filenames=filenames)
+
+ parse_configuration(self, self.command_options,
+ ignore_option_errors=ignore_option_errors)
+ self._finalize_requires()
+
+ def parse_command_line(self):
+ """Process features after parsing command line options"""
+ result = _Distribution.parse_command_line(self)
+ if self.features:
+ self._finalize_features()
+ return result
+
+ def _feature_attrname(self, name):
+ """Convert feature name to corresponding option attribute name"""
+ return 'with_' + name.replace('-', '_')
+
+ def fetch_build_eggs(self, requires):
+ """Resolve pre-setup requirements"""
+ resolved_dists = pkg_resources.working_set.resolve(
+ pkg_resources.parse_requirements(requires),
+ installer=self.fetch_build_egg,
+ replace_conflicting=True,
+ )
+ for dist in resolved_dists:
+ pkg_resources.working_set.add(dist, replace=True)
+ return resolved_dists
+
+ def finalize_options(self):
+ _Distribution.finalize_options(self)
+ if self.features:
+ self._set_global_opts_from_features()
+
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ value = getattr(self, ep.name, None)
+ if value is not None:
+ ep.require(installer=self.fetch_build_egg)
+ ep.load()(self, ep.name, value)
+ if getattr(self, 'convert_2to3_doctests', None):
+ # XXX may convert to set here when we can rely on set being builtin
+ self.convert_2to3_doctests = [
+ os.path.abspath(p)
+ for p in self.convert_2to3_doctests
+ ]
+ else:
+ self.convert_2to3_doctests = []
+
+ def get_egg_cache_dir(self):
+ egg_cache_dir = os.path.join(os.curdir, '.eggs')
+ if not os.path.exists(egg_cache_dir):
+ os.mkdir(egg_cache_dir)
+ windows_support.hide_file(egg_cache_dir)
+ readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
+ with open(readme_txt_filename, 'w') as f:
+ f.write('This directory contains eggs that were downloaded '
+ 'by setuptools to build, test, and run plug-ins.\n\n')
+ f.write('This directory caches those eggs to prevent '
+ 'repeated downloads.\n\n')
+ f.write('However, it is safe to delete this directory.\n\n')
+
+ return egg_cache_dir
+
+ def fetch_build_egg(self, req):
+ """Fetch an egg needed for building"""
+ from setuptools.command.easy_install import easy_install
+ dist = self.__class__({'script_args': ['easy_install']})
+ opts = dist.get_option_dict('easy_install')
+ opts.clear()
+ opts.update(
+ (k, v)
+ for k, v in self.get_option_dict('easy_install').items()
+ if k in (
+ # don't use any other settings
+ 'find_links', 'site_dirs', 'index_url',
+ 'optimize', 'allow_hosts',
+ ))
+ if self.dependency_links:
+ links = self.dependency_links[:]
+ if 'find_links' in opts:
+ links = opts['find_links'][1] + links
+ opts['find_links'] = ('setup', links)
+ install_dir = self.get_egg_cache_dir()
+ cmd = easy_install(
+ dist, args=["x"], install_dir=install_dir,
+ exclude_scripts=True,
+ always_copy=False, build_directory=None, editable=False,
+ upgrade=False, multi_version=True, no_report=True, user=False
+ )
+ cmd.ensure_finalized()
+ return cmd.easy_install(req)
+
+ def _set_global_opts_from_features(self):
+ """Add --with-X/--without-X options based on optional features"""
+
+ go = []
+ no = self.negative_opt.copy()
+
+ for name, feature in self.features.items():
+ self._set_feature(name, None)
+ feature.validate(self)
+
+ if feature.optional:
+ descr = feature.description
+ incdef = ' (default)'
+ excdef = ''
+ if not feature.include_by_default():
+ excdef, incdef = incdef, excdef
+
+ new = (
+ ('with-' + name, None, 'include ' + descr + incdef),
+ ('without-' + name, None, 'exclude ' + descr + excdef),
+ )
+ go.extend(new)
+ no['without-' + name] = 'with-' + name
+
+ self.global_options = self.feature_options = go + self.global_options
+ self.negative_opt = self.feature_negopt = no
+
+ def _finalize_features(self):
+ """Add/remove features and resolve dependencies between them"""
+
+ # First, flag all the enabled items (and thus their dependencies)
+ for name, feature in self.features.items():
+ enabled = self.feature_is_included(name)
+ if enabled or (enabled is None and feature.include_by_default()):
+ feature.include_in(self)
+ self._set_feature(name, 1)
+
+ # Then disable the rest, so that off-by-default features don't
+ # get flagged as errors when they're required by an enabled feature
+ for name, feature in self.features.items():
+ if not self.feature_is_included(name):
+ feature.exclude_from(self)
+ self._set_feature(name, 0)
+
+ def get_command_class(self, command):
+ """Pluggable version of get_command_class()"""
+ if command in self.cmdclass:
+ return self.cmdclass[command]
+
+ eps = pkg_resources.iter_entry_points('distutils.commands', command)
+ for ep in eps:
+ ep.require(installer=self.fetch_build_egg)
+ self.cmdclass[command] = cmdclass = ep.load()
+ return cmdclass
+ else:
+ return _Distribution.get_command_class(self, command)
+
+ def print_commands(self):
+ for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ if ep.name not in self.cmdclass:
+ # don't require extras as the commands won't be invoked
+ cmdclass = ep.resolve()
+ self.cmdclass[ep.name] = cmdclass
+ return _Distribution.print_commands(self)
+
+ def get_command_list(self):
+ for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ if ep.name not in self.cmdclass:
+ # don't require extras as the commands won't be invoked
+ cmdclass = ep.resolve()
+ self.cmdclass[ep.name] = cmdclass
+ return _Distribution.get_command_list(self)
+
+ def _set_feature(self, name, status):
+ """Set feature's inclusion status"""
+ setattr(self, self._feature_attrname(name), status)
+
+ def feature_is_included(self, name):
+ """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
+ return getattr(self, self._feature_attrname(name))
+
+ def include_feature(self, name):
+ """Request inclusion of feature named 'name'"""
+
+ if self.feature_is_included(name) == 0:
+ descr = self.features[name].description
+ raise DistutilsOptionError(
+ descr + " is required, but was excluded or is not available"
+ )
+ self.features[name].include_in(self)
+ self._set_feature(name, 1)
+
+ def include(self, **attrs):
+ """Add items to distribution that are named in keyword arguments
+
+ For example, 'dist.include(py_modules=["x"])' would add 'x' to
+ the distribution's 'py_modules' attribute, if it was not already
+ there.
+
+ Currently, this method only supports inclusion for attributes that are
+ lists or tuples. If you need to add support for adding to other
+ attributes in this or a subclass, you can add an '_include_X' method,
+ where 'X' is the name of the attribute. The method will be called with
+ the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
+ will try to call 'dist._include_foo({"bar":"baz"})', which can then
+ handle whatever special inclusion logic is needed.
+ """
+ for k, v in attrs.items():
+ include = getattr(self, '_include_' + k, None)
+ if include:
+ include(v)
+ else:
+ self._include_misc(k, v)
+
+ def exclude_package(self, package):
+ """Remove packages, modules, and extensions in named package"""
+
+ pfx = package + '.'
+ if self.packages:
+ self.packages = [
+ p for p in self.packages
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.py_modules:
+ self.py_modules = [
+ p for p in self.py_modules
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.ext_modules:
+ self.ext_modules = [
+ p for p in self.ext_modules
+ if p.name != package and not p.name.startswith(pfx)
+ ]
+
+ def has_contents_for(self, package):
+ """Return true if 'exclude_package(package)' would do something"""
+
+ pfx = package + '.'
+
+ for p in self.iter_distribution_names():
+ if p == package or p.startswith(pfx):
+ return True
+
+ def _exclude_misc(self, name, value):
+ """Handle 'exclude()' for list/tuple attrs without a special handler"""
+ if not isinstance(value, sequence):
+ raise DistutilsSetupError(
+ "%s: setting must be a list or tuple (%r)" % (name, value)
+ )
+ try:
+ old = getattr(self, name)
+ except AttributeError:
+ raise DistutilsSetupError(
+ "%s: No such distribution setting" % name
+ )
+ if old is not None and not isinstance(old, sequence):
+ raise DistutilsSetupError(
+ name + ": this setting cannot be changed via include/exclude"
+ )
+ elif old:
+ setattr(self, name, [item for item in old if item not in value])
+
+ def _include_misc(self, name, value):
+ """Handle 'include()' for list/tuple attrs without a special handler"""
+
+ if not isinstance(value, sequence):
+ raise DistutilsSetupError(
+ "%s: setting must be a list (%r)" % (name, value)
+ )
+ try:
+ old = getattr(self, name)
+ except AttributeError:
+ raise DistutilsSetupError(
+ "%s: No such distribution setting" % name
+ )
+ if old is None:
+ setattr(self, name, value)
+ elif not isinstance(old, sequence):
+ raise DistutilsSetupError(
+ name + ": this setting cannot be changed via include/exclude"
+ )
+ else:
+ new = [item for item in value if item not in old]
+ setattr(self, name, old + new)
+
+ def exclude(self, **attrs):
+ """Remove items from distribution that are named in keyword arguments
+
+ For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
+ the distribution's 'py_modules' attribute. Excluding packages uses
+ the 'exclude_package()' method, so all of the package's contained
+ packages, modules, and extensions are also excluded.
+
+ Currently, this method only supports exclusion from attributes that are
+ lists or tuples. If you need to add support for excluding from other
+ attributes in this or a subclass, you can add an '_exclude_X' method,
+ where 'X' is the name of the attribute. The method will be called with
+ the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
+ will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
+ handle whatever special exclusion logic is needed.
+ """
+ for k, v in attrs.items():
+ exclude = getattr(self, '_exclude_' + k, None)
+ if exclude:
+ exclude(v)
+ else:
+ self._exclude_misc(k, v)
+
+ def _exclude_packages(self, packages):
+ if not isinstance(packages, sequence):
+ raise DistutilsSetupError(
+ "packages: setting must be a list or tuple (%r)" % (packages,)
+ )
+ list(map(self.exclude_package, packages))
+
+ def _parse_command_opts(self, parser, args):
+ # Remove --with-X/--without-X options when processing command args
+ self.global_options = self.__class__.global_options
+ self.negative_opt = self.__class__.negative_opt
+
+ # First, expand any aliases
+ command = args[0]
+ aliases = self.get_option_dict('aliases')
+ while command in aliases:
+ src, alias = aliases[command]
+ del aliases[command] # ensure each alias can expand only once!
+ import shlex
+ args[:1] = shlex.split(alias, True)
+ command = args[0]
+
+ nargs = _Distribution._parse_command_opts(self, parser, args)
+
+ # Handle commands that want to consume all remaining arguments
+ cmd_class = self.get_command_class(command)
+ if getattr(cmd_class, 'command_consumes_arguments', None):
+ self.get_option_dict(command)['args'] = ("command line", nargs)
+ if nargs is not None:
+ return []
+
+ return nargs
+
+ def get_cmdline_options(self):
+ """Return a '{cmd: {opt:val}}' map of all command-line options
+
+ Option names are all long, but do not include the leading '--', and
+ contain dashes rather than underscores. If the option doesn't take
+ an argument (e.g. '--quiet'), the 'val' is 'None'.
+
+ Note that options provided by config files are intentionally excluded.
+ """
+
+ d = {}
+
+ for cmd, opts in self.command_options.items():
+
+ for opt, (src, val) in opts.items():
+
+ if src != "command line":
+ continue
+
+ opt = opt.replace('_', '-')
+
+ if val == 0:
+ cmdobj = self.get_command_obj(cmd)
+ neg_opt = self.negative_opt.copy()
+ neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
+ for neg, pos in neg_opt.items():
+ if pos == opt:
+ opt = neg
+ val = None
+ break
+ else:
+ raise AssertionError("Shouldn't be able to get here")
+
+ elif val == 1:
+ val = None
+
+ d.setdefault(cmd, {})[opt] = val
+
+ return d
+
+ def iter_distribution_names(self):
+ """Yield all packages, modules, and extension names in distribution"""
+
+ for pkg in self.packages or ():
+ yield pkg
+
+ for module in self.py_modules or ():
+ yield module
+
+ for ext in self.ext_modules or ():
+ if isinstance(ext, tuple):
+ name, buildinfo = ext
+ else:
+ name = ext.name
+ if name.endswith('module'):
+ name = name[:-6]
+ yield name
+
+ def handle_display_options(self, option_order):
+ """If there were any non-global "display-only" options
+ (--help-commands or the metadata display options) on the command
+ line, display the requested info and return true; else return
+ false.
+ """
+ import sys
+
+ if six.PY2 or self.help_commands:
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Stdout may be StringIO (e.g. in tests)
+ import io
+ if not isinstance(sys.stdout, io.TextIOWrapper):
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Don't wrap stdout if utf-8 is already the encoding. Provides
+ # workaround for #334.
+ if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Print metadata in UTF-8 no matter the platform
+ encoding = sys.stdout.encoding
+ errors = sys.stdout.errors
+ newline = '\n' if sys.platform != 'win32' else None
+ line_buffering = sys.stdout.line_buffering
+
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
+ try:
+ return _Distribution.handle_display_options(self, option_order)
+ finally:
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.detach(), encoding, errors, newline, line_buffering)
+
+
+class Feature:
+ """
+ **deprecated** -- The `Feature` facility was never completely implemented
+ or supported, `has reported issues
+ <https://github.com/pypa/setuptools/issues/58>`_, and will be removed in
+ a future version.
+
+ A subset of the distribution that can be excluded if unneeded/wanted
+
+ Features are created using these keyword arguments:
+
+ 'description' -- a short, human readable description of the feature, to
+ be used in error messages, and option help messages.
+
+ 'standard' -- if true, the feature is included by default if it is
+ available on the current system. Otherwise, the feature is only
+ included if requested via a command line '--with-X' option, or if
+ another included feature requires it. The default setting is 'False'.
+
+ 'available' -- if true, the feature is available for installation on the
+ current system. The default setting is 'True'.
+
+ 'optional' -- if true, the feature's inclusion can be controlled from the
+ command line, using the '--with-X' or '--without-X' options. If
+ false, the feature's inclusion status is determined automatically,
+ based on 'available', 'standard', and whether any other feature
+ requires it. The default setting is 'True'.
+
+ 'require_features' -- a string or sequence of strings naming features
+ that should also be included if this feature is included. Defaults to
+ empty list. May also contain 'Require' objects that should be
+ added/removed from the distribution.
+
+ 'remove' -- a string or list of strings naming packages to be removed
+ from the distribution if this feature is *not* included. If the
+ feature *is* included, this argument is ignored. This argument exists
+ to support removing features that "crosscut" a distribution, such as
+ defining a 'tests' feature that removes all the 'tests' subpackages
+ provided by other features. The default for this argument is an empty
+ list. (Note: the named package(s) or modules must exist in the base
+ distribution when the 'setup()' function is initially called.)
+
+ other keywords -- any other keyword arguments are saved, and passed to
+ the distribution's 'include()' and 'exclude()' methods when the
+ feature is included or excluded, respectively. So, for example, you
+ could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
+ added or removed from the distribution as appropriate.
+
+ A feature must include at least one 'require_features', 'remove', or other
+ keyword argument. Otherwise, it can't affect the distribution in any way.
+ Note also that you can subclass 'Feature' to create your own specialized
+ feature types that modify the distribution in other ways when included or
+ excluded. See the docstrings for the various methods here for more detail.
+ Aside from the methods, the only feature attributes that distributions look
+ at are 'description' and 'optional'.
+ """
+
+ @staticmethod
+ def warn_deprecated():
+ msg = (
+ "Features are deprecated and will be removed in a future "
+ "version. See https://github.com/pypa/setuptools/issues/65."
+ )
+ warnings.warn(msg, DeprecationWarning, stacklevel=3)
+
+ def __init__(
+ self, description, standard=False, available=True,
+ optional=True, require_features=(), remove=(), **extras):
+ self.warn_deprecated()
+
+ self.description = description
+ self.standard = standard
+ self.available = available
+ self.optional = optional
+ if isinstance(require_features, (str, Require)):
+ require_features = require_features,
+
+ self.require_features = [
+ r for r in require_features if isinstance(r, str)
+ ]
+ er = [r for r in require_features if not isinstance(r, str)]
+ if er:
+ extras['require_features'] = er
+
+ if isinstance(remove, str):
+ remove = remove,
+ self.remove = remove
+ self.extras = extras
+
+ if not remove and not require_features and not extras:
+ raise DistutilsSetupError(
+ "Feature %s: must define 'require_features', 'remove', or "
+ "at least one of 'packages', 'py_modules', etc."
+ )
+
+ def include_by_default(self):
+ """Should this feature be included by default?"""
+ return self.available and self.standard
+
+ def include_in(self, dist):
+ """Ensure feature and its requirements are included in distribution
+
+ You may override this in a subclass to perform additional operations on
+ the distribution. Note that this method may be called more than once
+ per feature, and so should be idempotent.
+
+ """
+
+ if not self.available:
+ raise DistutilsPlatformError(
+ self.description + " is required, "
+ "but is not available on this platform"
+ )
+
+ dist.include(**self.extras)
+
+ for f in self.require_features:
+ dist.include_feature(f)
+
+ def exclude_from(self, dist):
+ """Ensure feature is excluded from distribution
+
+ You may override this in a subclass to perform additional operations on
+ the distribution. This method will be called at most once per
+ feature, and only after all included features have been asked to
+ include themselves.
+ """
+
+ dist.exclude(**self.extras)
+
+ if self.remove:
+ for item in self.remove:
+ dist.exclude_package(item)
+
+ def validate(self, dist):
+ """Verify that feature makes sense in context of distribution
+
+ This method is called by the distribution just before it parses its
+ command line. It checks to ensure that the 'remove' attribute, if any,
+ contains only valid package/module names that are present in the base
+ distribution when 'setup()' is called. You may override it in a
+ subclass to perform any other required validation of the feature
+ against a target distribution.
+ """
+
+ for item in self.remove:
+ if not dist.has_contents_for(item):
+ raise DistutilsSetupError(
+ "%s wants to be able to remove %s, but the distribution"
+ " doesn't contain any packages or modules under %s"
+ % (self.description, item, item)
+ )
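
The marker handling in _convert_extras_requirements() and
_move_install_requirements_markers() above is the subtlest part of this
file. Here is a minimal standalone sketch of the same transformation (not
the setuptools implementation itself; it assumes only that pkg_resources
is importable, and the requirement strings are illustrative):

    from collections import defaultdict

    import pkg_resources

    install_requires = ['six>=1.10', 'pywin32; sys_platform == "win32"']

    simple, by_marker = [], defaultdict(list)
    for req in pkg_resources.parse_requirements(install_requires):
        if req.marker is None:
            simple.append(str(req))
        else:
            marker = req.marker
            req.marker = None  # strip the marker, as _clean_req() does
            by_marker[':' + str(marker)].append(str(req))

    print(simple)           # ['six>=1.10']
    print(dict(by_marker))  # {':sys_platform == "win32"': ['pywin32']}

Simple requirements stay in install_requires; marker-qualified ones end up
under an ':<marker>' key in extras_require, which is the shape
_finalize_requires() produces.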
diff --git a/setuptools/extension.py b/setuptools/extension.py
new file mode 100644
index 0000000..2946889
--- /dev/null
+++ b/setuptools/extension.py
@@ -0,0 +1,57 @@
+import re
+import functools
+import distutils.core
+import distutils.errors
+import distutils.extension
+
+from setuptools.extern.six.moves import map
+
+from .monkey import get_unpatched
+
+
+def _have_cython():
+ """
+ Return True if Cython can be imported.
+ """
+ cython_impl = 'Cython.Distutils.build_ext'
+ try:
+ # from (cython_impl) import build_ext
+ __import__(cython_impl, fromlist=['build_ext']).build_ext
+ return True
+ except Exception:
+ pass
+ return False
+
+
+# for compatibility
+have_pyrex = _have_cython
+
+_Extension = get_unpatched(distutils.core.Extension)
+
+
+class Extension(_Extension):
+ """Extension that uses '.c' files in place of '.pyx' files"""
+
+ def __init__(self, name, sources, *args, **kw):
+ # The *args is needed for compatibility as calls may use positional
+ # arguments. py_limited_api may be set only via keyword.
+ self.py_limited_api = kw.pop("py_limited_api", False)
+ _Extension.__init__(self, name, sources, *args, **kw)
+
+ def _convert_pyx_sources_to_lang(self):
+ """
+ Replace sources with .pyx extensions to sources with the target
+ language extension. This mechanism allows language authors to supply
+ pre-converted sources but to prefer the .pyx sources.
+ """
+ if _have_cython():
+ # the build has Cython, so allow it to compile the .pyx files
+ return
+ lang = self.language or ''
+ target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
+ sub = functools.partial(re.sub, '.pyx$', target_ext)
+ self.sources = list(map(sub, self.sources))
+
+
+class Library(Extension):
+ """Just like a regular Extension, but built as a library instead"""
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
new file mode 100644
index 0000000..da3d668
--- /dev/null
+++ b/setuptools/extern/__init__.py
@@ -0,0 +1,73 @@
+import sys
+
+
+class VendorImporter:
+ """
+ A PEP 302 meta path importer for finding optionally-vendored
+ or otherwise naturally-installed packages from root_name.
+ """
+
+ def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
+ self.root_name = root_name
+ self.vendored_names = set(vendored_names)
+ self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
+
+ @property
+ def search_path(self):
+ """
+ Search first the vendor package then as a natural package.
+ """
+ yield self.vendor_pkg + '.'
+ yield ''
+
+ def find_module(self, fullname, path=None):
+ """
+ Return self when fullname starts with root_name and the
+ target module is one vendored through this importer.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ if root:
+ return
+ if not any(map(target.startswith, self.vendored_names)):
+ return
+ return self
+
+ def load_module(self, fullname):
+ """
+ Iterate over the search path to locate and load fullname.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ for prefix in self.search_path:
+ try:
+ extant = prefix + target
+ __import__(extant)
+ mod = sys.modules[extant]
+ sys.modules[fullname] = mod
+ # mysterious hack:
+ # Remove the reference to the extant package/module
+ # on later Python versions to cause relative imports
+ # in the vendor package to resolve the same modules
+ # as those going through this importer.
+ if sys.version_info > (3, 3):
+ del sys.modules[extant]
+ return mod
+ except ImportError:
+ pass
+ else:
+ raise ImportError(
+ "The '{target}' package is required; "
+ "normally this is bundled with this package so if you get "
+ "this warning, consult the packager of your "
+ "distribution.".format(**locals())
+ )
+
+ def install(self):
+ """
+ Install this importer into sys.meta_path if not already present.
+ """
+ if self not in sys.meta_path:
+ sys.meta_path.append(self)
+
+
+names = 'six', 'packaging', 'pyparsing',
+VendorImporter(__name__, names, 'setuptools._vendor').install()
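
The importer's effect is easiest to see by tracing its search path by
hand. A small sketch using the same partition logic as find_module() and
load_module(); the module names are just examples:

    root_name = 'setuptools.extern'
    vendor_pkg = 'setuptools._vendor'
    fullname = 'setuptools.extern.six'

    root, base, target = fullname.partition(root_name + '.')
    assert not root and target == 'six'
    for prefix in (vendor_pkg + '.', ''):
        print('would try:', prefix + target)
    # would try: setuptools._vendor.six
    # would try: six

So the vendored copy wins when present, and a naturally installed copy is
used as the fallback.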
diff --git a/setuptools/glibc.py b/setuptools/glibc.py
new file mode 100644
index 0000000..a134591
--- /dev/null
+++ b/setuptools/glibc.py
@@ -0,0 +1,86 @@
+# This file originally from pip:
+# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py
+from __future__ import absolute_import
+
+import ctypes
+import re
+import warnings
+
+
+def glibc_version_string():
+ "Returns glibc version string, or None if not using glibc."
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing
+def check_glibc_version(version_str, required_major, minimum_minor):
+ # Parse string and check against requested version.
+ #
+ # We use a regexp instead of str.split because we want to discard any
+ # random junk that might come after the minor version -- this might happen
+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ # uses version strings like "2.20-2014.11"). See gh-3588.
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn("Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str, RuntimeWarning)
+ return False
+ return (int(m.group("major")) == required_major and
+ int(m.group("minor")) >= minimum_minor)
+
+
+def have_compatible_glibc(required_major, minimum_minor):
+ version_str = glibc_version_string()
+ if version_str is None:
+ return False
+ return check_glibc_version(version_str, required_major, minimum_minor)
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver():
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
diff --git a/setuptools/glob.py b/setuptools/glob.py
new file mode 100644
index 0000000..6c781de
--- /dev/null
+++ b/setuptools/glob.py
@@ -0,0 +1,176 @@
+"""
+Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
+
+Changes include:
+ * `yield from` and PEP3102 `*` removed.
+ * `bytes` changed to `six.binary_type`.
+ * Hidden files are not ignored.
+"""
+
+import os
+import re
+import fnmatch
+from setuptools.extern.six import binary_type
+
+__all__ = ["glob", "iglob", "escape"]
+
+
+def glob(pathname, recursive=False):
+ """Return a list of paths matching a pathname pattern.
+
+ The pattern may contain simple shell-style wildcards a la
+ fnmatch. However, unlike fnmatch, filenames starting with a
+ dot are special cases that are not matched by '*' and '?'
+ patterns.
+
+ If recursive is true, the pattern '**' will match any files and
+ zero or more directories and subdirectories.
+ """
+ return list(iglob(pathname, recursive=recursive))
+
+
+def iglob(pathname, recursive=False):
+ """Return an iterator which yields the paths matching a pathname pattern.
+
+ The pattern may contain simple shell-style wildcards a la
+ fnmatch. However, unlike fnmatch, filenames starting with a
+ dot are special cases that are not matched by '*' and '?'
+ patterns.
+
+ If recursive is true, the pattern '**' will match any files and
+ zero or more directories and subdirectories.
+ """
+ it = _iglob(pathname, recursive)
+ if recursive and _isrecursive(pathname):
+ s = next(it) # skip empty string
+ assert not s
+ return it
+
+
+def _iglob(pathname, recursive):
+ dirname, basename = os.path.split(pathname)
+ if not has_magic(pathname):
+ if basename:
+ if os.path.lexists(pathname):
+ yield pathname
+ else:
+ # Patterns ending with a slash should match only directories
+ if os.path.isdir(dirname):
+ yield pathname
+ return
+ if not dirname:
+ if recursive and _isrecursive(basename):
+ for x in glob2(dirname, basename):
+ yield x
+ else:
+ for x in glob1(dirname, basename):
+ yield x
+ return
+ # `os.path.split()` returns the argument itself as a dirname if it is a
+ # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
+ # contains magic characters (e.g. r'\\?\C:').
+ if dirname != pathname and has_magic(dirname):
+ dirs = _iglob(dirname, recursive)
+ else:
+ dirs = [dirname]
+ if has_magic(basename):
+ if recursive and _isrecursive(basename):
+ glob_in_dir = glob2
+ else:
+ glob_in_dir = glob1
+ else:
+ glob_in_dir = glob0
+ for dirname in dirs:
+ for name in glob_in_dir(dirname, basename):
+ yield os.path.join(dirname, name)
+
+
+# These 2 helper functions non-recursively glob inside a literal directory.
+# They return a list of basenames. `glob1` accepts a pattern while `glob0`
+# takes a literal basename (so it only has to check for its existence).
+
+
+def glob1(dirname, pattern):
+ if not dirname:
+ if isinstance(pattern, binary_type):
+ dirname = os.curdir.encode('ASCII')
+ else:
+ dirname = os.curdir
+ try:
+ names = os.listdir(dirname)
+ except OSError:
+ return []
+ return fnmatch.filter(names, pattern)
+
+
+def glob0(dirname, basename):
+ if not basename:
+ # `os.path.split()` returns an empty basename for paths ending with a
+ # directory separator. 'q*x/' should match only directories.
+ if os.path.isdir(dirname):
+ return [basename]
+ else:
+ if os.path.lexists(os.path.join(dirname, basename)):
+ return [basename]
+ return []
+
+
+# This helper function recursively yields relative pathnames inside a literal
+# directory.
+
+
+def glob2(dirname, pattern):
+ assert _isrecursive(pattern)
+ yield pattern[:0] # an empty value of the pattern's own type (str or bytes)
+ for x in _rlistdir(dirname):
+ yield x
+
+
+# Recursively yields relative pathnames inside a literal directory.
+def _rlistdir(dirname):
+ if not dirname:
+ if isinstance(dirname, binary_type):
+ dirname = binary_type(os.curdir, 'ASCII')
+ else:
+ dirname = os.curdir
+ try:
+ names = os.listdir(dirname)
+ except os.error:
+ return
+ for x in names:
+ yield x
+ path = os.path.join(dirname, x) if dirname else x
+ for y in _rlistdir(path):
+ yield os.path.join(x, y)
+
+
+magic_check = re.compile('([*?[])')
+magic_check_bytes = re.compile(b'([*?[])')
+
+
+def has_magic(s):
+ if isinstance(s, binary_type):
+ match = magic_check_bytes.search(s)
+ else:
+ match = magic_check.search(s)
+ return match is not None
+
+
+def _isrecursive(pattern):
+ if isinstance(pattern, binary_type):
+ return pattern == b'**'
+ else:
+ return pattern == '**'
+
+
+def escape(pathname):
+ """Escape all special characters.
+ """
+ # Escaping is done by wrapping any of "*?[" between square brackets.
+ # Metacharacters do not work in the drive part and shouldn't be escaped.
+ drive, pathname = os.path.splitdrive(pathname)
+ if isinstance(pathname, binary_type):
+ pathname = magic_check_bytes.sub(br'[\1]', pathname)
+ else:
+ pathname = magic_check.sub(r'[\1]', pathname)
+ return drive + pathname
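
To make the escaping rule concrete, a short sketch (the paths are
hypothetical). Note that an escaped pattern still contains '[' and so
still counts as "magic"; it simply matches the metacharacters literally:

    from setuptools.glob import escape, glob, has_magic

    print(has_magic('build/*.py'))    # True: '*' is a magic character
    print(escape('weird[dir]/*.py'))  # 'weird[[]dir]/[*].py'
    print(glob('setup.py'))           # literal lookup: ['setup.py'] if it exists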
diff --git a/setuptools/gui-32.exe b/setuptools/gui-32.exe
new file mode 100644
index 0000000..f8d3509
--- /dev/null
+++ b/setuptools/gui-32.exe
Binary files differ
diff --git a/setuptools/gui-64.exe b/setuptools/gui-64.exe
new file mode 100644
index 0000000..330c51a
--- /dev/null
+++ b/setuptools/gui-64.exe
Binary files differ
diff --git a/setuptools/gui.exe b/setuptools/gui.exe
new file mode 100644
index 0000000..f8d3509
--- /dev/null
+++ b/setuptools/gui.exe
Binary files differ
diff --git a/setuptools/launch.py b/setuptools/launch.py
new file mode 100644
index 0000000..308283e
--- /dev/null
+++ b/setuptools/launch.py
@@ -0,0 +1,35 @@
+"""
+Launch the Python script on the command line after
+setuptools is bootstrapped via import.
+"""
+
+# Note that setuptools gets imported implicitly by the
+# invocation of this script using python -m setuptools.launch
+
+import tokenize
+import sys
+
+
+def run():
+ """
+ Run the script in sys.argv[1] as if it had
+ been invoked naturally.
+ """
+ __builtins__
+ script_name = sys.argv[1]
+ namespace = dict(
+ __file__=script_name,
+ __name__='__main__',
+ __doc__=None,
+ )
+ sys.argv[:] = sys.argv[1:]
+
+ open_ = getattr(tokenize, 'open', open)
+ script = open_(script_name).read()
+ norm_script = script.replace('\\r\\n', '\\n')
+ code = compile(norm_script, script_name, 'exec')
+ exec(code, namespace)
+
+
+if __name__ == '__main__':
+ run()
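
The module is intended to be driven via '-m'. A hedged, programmatic
equivalent (the script name and argument are hypothetical):

    import subprocess
    import sys

    # runs myscript.py as if invoked directly, after bootstrapping setuptools
    subprocess.check_call(
        [sys.executable, '-m', 'setuptools.launch', 'myscript.py', 'arg1']
    )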
diff --git a/setuptools/lib2to3_ex.py b/setuptools/lib2to3_ex.py
new file mode 100644
index 0000000..4b1a73f
--- /dev/null
+++ b/setuptools/lib2to3_ex.py
@@ -0,0 +1,62 @@
+"""
+Customized Mixin2to3 support:
+
+ - adds support for converting doctests
+
+
+This module raises an ImportError on Python 2.
+"""
+
+from distutils.util import Mixin2to3 as _Mixin2to3
+from distutils import log
+from lib2to3.refactor import RefactoringTool, get_fixers_from_package
+
+import setuptools
+
+
+class DistutilsRefactoringTool(RefactoringTool):
+ def log_error(self, msg, *args, **kw):
+ log.error(msg, *args)
+
+ def log_message(self, msg, *args):
+ log.info(msg, *args)
+
+ def log_debug(self, msg, *args):
+ log.debug(msg, *args)
+
+
+class Mixin2to3(_Mixin2to3):
+ def run_2to3(self, files, doctests=False):
+ # See if the distribution option has been set; otherwise check the
+ # setuptools default.
+ if self.distribution.use_2to3 is not True:
+ return
+ if not files:
+ return
+ log.info("Fixing " + " ".join(files))
+ self.__build_fixer_names()
+ self.__exclude_fixers()
+ if doctests:
+ if setuptools.run_2to3_on_doctests:
+ r = DistutilsRefactoringTool(self.fixer_names)
+ r.refactor(files, write=True, doctests_only=True)
+ else:
+ _Mixin2to3.run_2to3(self, files)
+
+ def __build_fixer_names(self):
+ if self.fixer_names:
+ return
+ self.fixer_names = []
+ for p in setuptools.lib2to3_fixer_packages:
+ self.fixer_names.extend(get_fixers_from_package(p))
+ if self.distribution.use_2to3_fixers is not None:
+ for p in self.distribution.use_2to3_fixers:
+ self.fixer_names.extend(get_fixers_from_package(p))
+
+ def __exclude_fixers(self):
+ excluded_fixers = getattr(self, 'exclude_fixers', [])
+ if self.distribution.use_2to3_exclude_fixers is not None:
+ excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
+ for fixer_name in excluded_fixers:
+ if fixer_name in self.fixer_names:
+ self.fixer_names.remove(fixer_name)
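
The fixer names accumulated by __build_fixer_names() come straight from
lib2to3. A quick sketch of what that lookup yields, on Pythons that still
ship the stdlib lib2to3 (output truncated and illustrative):

    from lib2to3.refactor import get_fixers_from_package

    fixers = get_fixers_from_package('lib2to3.fixes')
    print(fixers[:2])  # e.g. ['lib2to3.fixes.fix_apply', 'lib2to3.fixes.fix_asserts']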
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
new file mode 100644
index 0000000..08ed50d
--- /dev/null
+++ b/setuptools/monkey.py
@@ -0,0 +1,181 @@
+"""
+Monkey patching of distutils.
+"""
+
+import sys
+import distutils.filelist
+import platform
+import types
+import functools
+from importlib import import_module
+import inspect
+
+from setuptools.extern import six
+
+import setuptools
+
+__all__ = []
+"""
+Everything is private. Contact the project team
+if you think you need this functionality.
+"""
+
+
+def _get_mro(cls):
+ """
+ Return the base classes for cls, sorted by the MRO.
+
+ Works around an issue on Jython where inspect.getmro will not return all
+ base classes if multiple classes share the same name. Instead, this
+ function will return a tuple containing the class itself, and the contents
+ of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024.
+ """
+ if platform.python_implementation() == "Jython":
+ return (cls,) + cls.__bases__
+ return inspect.getmro(cls)
+
+
+def get_unpatched(item):
+ lookup = (
+ get_unpatched_class if isinstance(item, six.class_types) else
+ get_unpatched_function if isinstance(item, types.FunctionType) else
+ lambda item: None
+ )
+ return lookup(item)
+
+
+def get_unpatched_class(cls):
+ """Protect against re-patching the distutils if reloaded
+
+ Also ensures that no other distutils extension monkeypatched the distutils
+ first.
+ """
+ external_bases = (
+ cls
+ for cls in _get_mro(cls)
+ if not cls.__module__.startswith('setuptools')
+ )
+ base = next(external_bases)
+ if not base.__module__.startswith('distutils'):
+ msg = "distutils has already been patched by %r" % cls
+ raise AssertionError(msg)
+ return base
+
+
+def patch_all():
+ # we can't patch distutils.cmd, alas
+ distutils.core.Command = setuptools.Command
+
+ has_issue_12885 = sys.version_info <= (3, 5, 3)
+
+ if has_issue_12885:
+ # fix findall bug in distutils (http://bugs.python.org/issue12885)
+ distutils.filelist.findall = setuptools.findall
+
+ needs_warehouse = (
+ sys.version_info < (2, 7, 13)
+ or
+ (3, 0) < sys.version_info < (3, 3, 7)
+ or
+ (3, 4) < sys.version_info < (3, 4, 6)
+ or
+ (3, 5) < sys.version_info <= (3, 5, 3)
+ )
+
+ if needs_warehouse:
+ warehouse = 'https://upload.pypi.org/legacy/'
+ distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse
+
+ _patch_distribution_metadata_write_pkg_file()
+
+ # Install Distribution throughout the distutils
+ for module in distutils.dist, distutils.core, distutils.cmd:
+ module.Distribution = setuptools.dist.Distribution
+
+ # Install the patched Extension
+ distutils.core.Extension = setuptools.extension.Extension
+ distutils.extension.Extension = setuptools.extension.Extension
+ if 'distutils.command.build_ext' in sys.modules:
+ sys.modules['distutils.command.build_ext'].Extension = (
+ setuptools.extension.Extension
+ )
+
+ patch_for_msvc_specialized_compiler()
+
+
+def _patch_distribution_metadata_write_pkg_file():
+ """Patch write_pkg_file to also write Requires-Python/Requires-External"""
+ distutils.dist.DistributionMetadata.write_pkg_file = (
+ setuptools.dist.write_pkg_file
+ )
+
+
+def patch_func(replacement, target_mod, func_name):
+ """
+ Patch func_name in target_mod with replacement
+
+ Important - original must be resolved by name to avoid
+ patching an already patched function.
+ """
+ original = getattr(target_mod, func_name)
+
+ # set the 'unpatched' attribute on the replacement to
+ # point to the original.
+ vars(replacement).setdefault('unpatched', original)
+
+ # replace the function in the original module
+ setattr(target_mod, func_name, replacement)
+
+
+def get_unpatched_function(candidate):
+ return getattr(candidate, 'unpatched')
+
+
+def patch_for_msvc_specialized_compiler():
+ """
+ Patch functions in distutils to use standalone Microsoft Visual C++
+ compilers.
+ """
+ # import late to avoid circular imports on Python < 3.5
+ msvc = import_module('setuptools.msvc')
+
+ if platform.system() != 'Windows':
+ # These compilers are only available on Microsoft Windows
+ return
+
+ def patch_params(mod_name, func_name):
+ """
+ Prepare the parameters for patch_func to patch indicated function.
+ """
+ repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'
+ repl_name = repl_prefix + func_name.lstrip('_')
+ repl = getattr(msvc, repl_name)
+ mod = import_module(mod_name)
+ if not hasattr(mod, func_name):
+ raise ImportError(func_name)
+ return repl, mod, func_name
+
+ # Python 2.7 to 3.4
+ msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')
+
+ # Python 3.5+
+ msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')
+
+ try:
+ # Patch distutils.msvc9compiler
+ patch_func(*msvc9('find_vcvarsall'))
+ patch_func(*msvc9('query_vcvarsall'))
+ except ImportError:
+ pass
+
+ try:
+ # Patch distutils._msvccompiler._get_vc_env
+ patch_func(*msvc14('_get_vc_env'))
+ except ImportError:
+ pass
+
+ try:
+ # Patch distutils._msvccompiler.gen_lib_options for Numpy
+ patch_func(*msvc14('gen_lib_options'))
+ except ImportError:
+ pass
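
The patch_func()/get_unpatched_function() pair implements a simple
remember-then-replace pattern. A toy demonstration against a stand-in
module (everything here is illustrative, not distutils):

    import types

    toy = types.ModuleType('toy')
    toy.greet = lambda: 'original'

    def replacement():
        return 'patched'

    # mirror patch_func(): stash the original on the replacement, then swap
    vars(replacement).setdefault('unpatched', toy.greet)
    toy.greet = replacement

    print(toy.greet())              # 'patched'
    print(replacement.unpatched())  # 'original'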
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
new file mode 100644
index 0000000..5e20b3f
--- /dev/null
+++ b/setuptools/msvc.py
@@ -0,0 +1,1302 @@
+"""
+Improved support for Microsoft Visual C++ compilers.
+
+Known supported compilers:
+--------------------------
+Microsoft Visual C++ 9.0:
+ Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
+ Microsoft Windows SDK 6.1 (x86, x64, ia64)
+ Microsoft Windows SDK 7.0 (x86, x64, ia64)
+
+Microsoft Visual C++ 10.0:
+ Microsoft Windows SDK 7.1 (x86, x64, ia64)
+
+Microsoft Visual C++ 14.0:
+ Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
+ Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
+ Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
+"""
+
+import os
+import sys
+import platform
+import itertools
+import distutils.errors
+from setuptools.extern.packaging.version import LegacyVersion
+
+from setuptools.extern.six.moves import filterfalse
+
+from .monkey import get_unpatched
+
+if platform.system() == 'Windows':
+ from setuptools.extern.six.moves import winreg
+ safe_env = os.environ
+else:
+ """
+ Mock winreg and environ so the module can be imported
+ on this platform.
+ """
+
+ class winreg:
+ HKEY_USERS = None
+ HKEY_CURRENT_USER = None
+ HKEY_LOCAL_MACHINE = None
+ HKEY_CLASSES_ROOT = None
+
+ safe_env = dict()
+
+_msvc9_suppress_errors = (
+ # msvc9compiler isn't available on some platforms
+ ImportError,
+
+ # msvc9compiler raises DistutilsPlatformError in some
+ # environments. See #1118.
+ distutils.errors.DistutilsPlatformError,
+)
+
+try:
+ from distutils.msvc9compiler import Reg
+except _msvc9_suppress_errors:
+ pass
+
+
+def msvc9_find_vcvarsall(version):
+ """
+ Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
+ compiler build for Python (VCForPython). Fall back to original behavior
+ when the standalone compiler is not available.
+
+ Redirect the path of "vcvarsall.bat".
+
+ Known supported compilers
+ -------------------------
+ Microsoft Visual C++ 9.0:
+ Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
+
+ Parameters
+ ----------
+ version: float
+ Required Microsoft Visual C++ version.
+
+ Return
+ ------
+ vcvarsall.bat path: str
+ """
+ VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
+ key = VC_BASE % ('', version)
+ try:
+ # Per-user installs register the compiler path here
+ productdir = Reg.get_value(key, "installdir")
+ except KeyError:
+ try:
+ # All-user installs on a 64-bit system register here
+ key = VC_BASE % ('Wow6432Node\\', version)
+ productdir = Reg.get_value(key, "installdir")
+ except KeyError:
+ productdir = None
+
+ if productdir:
+ vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+ if os.path.isfile(vcvarsall):
+ return vcvarsall
+
+ return get_unpatched(msvc9_find_vcvarsall)(version)
+
+
+def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
+ """
+ Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
+ compilers.
+
+ Set environment without use of "vcvarsall.bat".
+
+ Known supported compilers
+ -------------------------
+ Microsoft Visual C++ 9.0:
+ Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
+ Microsoft Windows SDK 6.1 (x86, x64, ia64)
+ Microsoft Windows SDK 7.0 (x86, x64, ia64)
+
+ Microsoft Visual C++ 10.0:
+ Microsoft Windows SDK 7.1 (x86, x64, ia64)
+
+ Parameters
+ ----------
+ ver: float
+ Required Microsoft Visual C++ version.
+ arch: str
+ Target architecture.
+
+ Return
+ ------
+ environment: dict
+ """
+ # Try to get the environment from vcvarsall.bat (the classical way)
+ try:
+ orig = get_unpatched(msvc9_query_vcvarsall)
+ return orig(ver, arch, *args, **kwargs)
+ except distutils.errors.DistutilsPlatformError:
+ # Swallow the error if vcvarsall.bat is missing
+ pass
+ except ValueError:
+ # Swallow the error if the environment was not set by vcvarsall.bat
+ pass
+
+ # If error, try to set environment directly
+ try:
+ return EnvironmentInfo(arch, ver).return_env()
+ except distutils.errors.DistutilsPlatformError as exc:
+ _augment_exception(exc, ver, arch)
+ raise
+
+
+def msvc14_get_vc_env(plat_spec):
+ """
+ Patched "distutils._msvccompiler._get_vc_env" for support extra
+ compilers.
+
+ Set environment without use of "vcvarsall.bat".
+
+ Known supported compilers
+ -------------------------
+ Microsoft Visual C++ 14.0:
+ Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
+ Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
+ Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
+
+ Parameters
+ ----------
+ plat_spec: str
+ Target architecture.
+
+ Return
+ ------
+ environment: dict
+ """
+ # Try to get the environment from vcvarsall.bat (the classical way)
+ try:
+ return get_unpatched(msvc14_get_vc_env)(plat_spec)
+ except distutils.errors.DistutilsPlatformError:
+ # Swallow the error if vcvarsall.bat is missing
+ pass
+
+ # If error, try to set environment directly
+ try:
+ return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
+ except distutils.errors.DistutilsPlatformError as exc:
+ _augment_exception(exc, 14.0)
+ raise
+
+
+def msvc14_gen_lib_options(*args, **kwargs):
+ """
+ Patched "distutils._msvccompiler.gen_lib_options" for fix
+ compatibility between "numpy.distutils" and "distutils._msvccompiler"
+ (for Numpy < 1.11.2)
+ """
+ if "numpy.distutils" in sys.modules:
+ import numpy as np
+ if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):
+ return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
+ return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)
+
+
+def _augment_exception(exc, version, arch=''):
+ """
+ Add details to the exception message to help guide the user
+ as to what action will resolve it.
+ """
+ # Error if MSVC++ directory not found or environment not set
+ message = exc.args[0]
+
+ if "vcvarsall" in message.lower() or "visual c" in message.lower():
+ # Special error message if MSVC++ not installed
+ tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
+ message = tmpl.format(**locals())
+ msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
+ if version == 9.0:
+ if arch.lower().find('ia64') > -1:
+ # For VC++ 9.0, if IA64 support is needed, redirect user
+ # to Windows SDK 7.0
+ message += ' Get it with "Microsoft Windows SDK 7.0": '
+ message += msdownload % 3138
+ else:
+ # For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
+ # This redirection link is maintained by Microsoft.
+ # Contact vspython@microsoft.com if it needs updating.
+ message += ' Get it from http://aka.ms/vcpython27'
+ elif version == 10.0:
+ # For VC++ 10.0 Redirect user to Windows SDK 7.1
+ message += ' Get it with "Microsoft Windows SDK 7.1": '
+ message += msdownload % 8279
+ elif version >= 14.0:
+ # For VC++ 14.0 Redirect user to Visual C++ Build Tools
+ message += (' Get it with "Microsoft Visual C++ Build Tools": '
+ r'http://landinghub.visualstudio.com/'
+ 'visual-cpp-build-tools')
+
+ exc.args = (message, )
+
+
+class PlatformInfo:
+ """
+ Current and target architecture information.
+
+ Parameters
+ ----------
+ arch: str
+ Target architecture.
+ """
+ current_cpu = safe_env.get('processor_architecture', '').lower()
+
+ def __init__(self, arch):
+ self.arch = arch.lower().replace('x64', 'amd64')
+
+ @property
+ def target_cpu(self):
+ return self.arch[self.arch.find('_') + 1:]
+
+ def target_is_x86(self):
+ return self.target_cpu == 'x86'
+
+ def current_is_x86(self):
+ return self.current_cpu == 'x86'
+
+ def current_dir(self, hidex86=False, x64=False):
+ """
+ Current platform specific subfolder.
+
+ Parameters
+ ----------
+ hidex86: bool
+ return '' instead of '\x86' if architecture is x86.
+ x64: bool
+ return '\x64' instead of '\amd64' if architecture is amd64.
+
+ Return
+ ------
+ subfolder: str
+ '\current', or '' (see hidex86 parameter)
+ """
+ return (
+ '' if (self.current_cpu == 'x86' and hidex86) else
+ r'\x64' if (self.current_cpu == 'amd64' and x64) else
+ r'\%s' % self.current_cpu
+ )
+
+ def target_dir(self, hidex86=False, x64=False):
+ r"""
+ Target platform specific subfolder.
+
+ Parameters
+ ----------
+ hidex86: bool
+ return '' instead of '\x86' if architecture is x86.
+ x64: bool
+ return '\x64' instead of '\amd64' if architecture is amd64.
+
+ Return
+ ------
+ subfolder: str
+ '\target', or '' (see hidex86 parameter)
+ """
+ return (
+ '' if (self.target_cpu == 'x86' and hidex86) else
+ r'\x64' if (self.target_cpu == 'amd64' and x64) else
+ r'\%s' % self.target_cpu
+ )
+
+ def cross_dir(self, forcex86=False):
+ r"""
+ Cross platform specific subfolder.
+
+ Parameters
+ ----------
+ forcex86: bool
+ Use 'x86' as current architecture even if current architecture is
+ not x86.
+
+ Return
+ ------
+ subfolder: str
+ '' if target architecture is current architecture,
+ '\current_target' if not.
+ """
+ current = 'x86' if forcex86 else self.current_cpu
+ return (
+ '' if self.target_cpu == current else
+ self.target_dir().replace('\\', '\\%s_' % current)
+ )
+
+
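+ # Illustrative sketch (editor's example, not part of the patched module):
+ # expected subfolder strings when cross-compiling 32-bit code on a 64-bit
+ # host (current_cpu == 'amd64' read from the environment, target 'x86'):
+ #
+ # pi = PlatformInfo('x86')
+ # pi.target_dir() == '\\x86'
+ # pi.target_dir(hidex86=True) == ''
+ # pi.current_dir(x64=True) == '\\x64'
+ # pi.cross_dir() == '\\amd64_x86'
+
+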
+class RegistryInfo:
+ """
+ Microsoft Visual Studio related registry information.
+
+ Parameters
+ ----------
+ platform_info: PlatformInfo
+ "PlatformInfo" instance.
+ """
+ HKEYS = (winreg.HKEY_USERS,
+ winreg.HKEY_CURRENT_USER,
+ winreg.HKEY_LOCAL_MACHINE,
+ winreg.HKEY_CLASSES_ROOT)
+
+ def __init__(self, platform_info):
+ self.pi = platform_info
+
+ @property
+ def visualstudio(self):
+ """
+ Microsoft Visual Studio root registry key.
+ """
+ return 'VisualStudio'
+
+ @property
+ def sxs(self):
+ """
+ Microsoft Visual Studio SxS registry key.
+ """
+ return os.path.join(self.visualstudio, 'SxS')
+
+ @property
+ def vc(self):
+ """
+ Microsoft Visual C++ VC7 registry key.
+ """
+ return os.path.join(self.sxs, 'VC7')
+
+ @property
+ def vs(self):
+ """
+ Microsoft Visual Studio VS7 registry key.
+ """
+ return os.path.join(self.sxs, 'VS7')
+
+ @property
+ def vc_for_python(self):
+ """
+ Microsoft Visual C++ for Python registry key.
+ """
+ return r'DevDiv\VCForPython'
+
+ @property
+ def microsoft_sdk(self):
+ """
+ Microsoft SDK registry key.
+ """
+ return 'Microsoft SDKs'
+
+ @property
+ def windows_sdk(self):
+ """
+ Microsoft Windows/Platform SDK registry key.
+ """
+ return os.path.join(self.microsoft_sdk, 'Windows')
+
+ @property
+ def netfx_sdk(self):
+ """
+ Microsoft .NET Framework SDK registry key.
+ """
+ return os.path.join(self.microsoft_sdk, 'NETFXSDK')
+
+ @property
+ def windows_kits_roots(self):
+ """
+ Microsoft Windows Kits Roots registry key.
+ """
+ return r'Windows Kits\Installed Roots'
+
+ def microsoft(self, key, x86=False):
+ """
+ Return key in Microsoft software registry.
+
+ Parameters
+ ----------
+ key: str
+ Registry key path where to look.
+ x86: bool
+ Force x86 software registry.
+
+ Return
+ ------
+ value: str
+ """
+ node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
+ return os.path.join('Software', node64, 'Microsoft', key)
+
+ def lookup(self, key, name):
+ """
+ Look for values in the Microsoft software registry.
+
+ Parameters
+ ----------
+ key: str
+ Registry key path where to look.
+ name: str
+ Value name to find.
+
+ Return
+ ------
+ value: str
+ """
+ KEY_READ = winreg.KEY_READ
+ openkey = winreg.OpenKey
+ ms = self.microsoft
+ for hkey in self.HKEYS:
+ try:
+ bkey = openkey(hkey, ms(key), 0, KEY_READ)
+ except (OSError, IOError):
+ if not self.pi.current_is_x86():
+ try:
+ bkey = openkey(hkey, ms(key, True), 0, KEY_READ)
+ except (OSError, IOError):
+ continue
+ else:
+ continue
+ try:
+ return winreg.QueryValueEx(bkey, name)[0]
+ except (OSError, IOError):
+ pass
+
+
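+ # Illustrative sketch (editor's example, not part of the patched module):
+ # lookup() walks HKEY_USERS/HKCU/HKLM/HKCR and falls back to the
+ # Wow6432Node view on 64-bit hosts, so resolving a VC++ install dir is:
+ #
+ # ri = RegistryInfo(PlatformInfo('amd64'))
+ # ri.lookup(ri.vc, '14.0')  # install path string, or None if absent
+
+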
+class SystemInfo:
+ """
+ Microsoft Windows and Visual Studio related system information.
+
+ Parameters
+ ----------
+ registry_info: RegistryInfo
+ "RegistryInfo" instance.
+ vc_ver: float
+ Required Microsoft Visual C++ version.
+ """
+
+ # Variables and properties in this class use the original CamelCase
+ # variable names from Microsoft source files for easier comparison.
+ WinDir = safe_env.get('WinDir', '')
+ ProgramFiles = safe_env.get('ProgramFiles', '')
+ ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles)
+
+ def __init__(self, registry_info, vc_ver=None):
+ self.ri = registry_info
+ self.pi = self.ri.pi
+ self.vc_ver = vc_ver or self._find_latest_available_vc_ver()
+
+ def _find_latest_available_vc_ver(self):
+ try:
+ return self.find_available_vc_vers()[-1]
+ except IndexError:
+ err = 'No Microsoft Visual C++ version found'
+ raise distutils.errors.DistutilsPlatformError(err)
+
+ def find_available_vc_vers(self):
+ """
+ Find all available Microsoft Visual C++ versions.
+ """
+ ms = self.ri.microsoft
+ vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
+ vc_vers = []
+ for hkey in self.ri.HKEYS:
+ for key in vckeys:
+ try:
+ bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+ except (OSError, IOError):
+ continue
+ subkeys, values, _ = winreg.QueryInfoKey(bkey)
+ for i in range(values):
+ try:
+ ver = float(winreg.EnumValue(bkey, i)[0])
+ if ver not in vc_vers:
+ vc_vers.append(ver)
+ except ValueError:
+ pass
+ for i in range(subkeys):
+ try:
+ ver = float(winreg.EnumKey(bkey, i))
+ if ver not in vc_vers:
+ vc_vers.append(ver)
+ except ValueError:
+ pass
+ return sorted(vc_vers)
+
+ @property
+ def VSInstallDir(self):
+ """
+ Microsoft Visual Studio directory.
+ """
+ # Default path
+ name = 'Microsoft Visual Studio %0.1f' % self.vc_ver
+ default = os.path.join(self.ProgramFilesx86, name)
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default
+
+ @property
+ def VCInstallDir(self):
+ """
+ Microsoft Visual C++ directory.
+ """
+ self.VSInstallDir
+
+ guess_vc = self._guess_vc() or self._guess_vc_legacy()
+
+ # Try to get "VC++ for Python" path from registry as default path
+ reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+ python_vc = self.ri.lookup(reg_path, 'installdir')
+ default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc
+
+ # Try to get path from registry, if fail use default path
+ path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc
+
+ if not os.path.isdir(path):
+ msg = 'Microsoft Visual C++ directory not found'
+ raise distutils.errors.DistutilsPlatformError(msg)
+
+ return path
+
+ def _guess_vc(self):
+ """
+ Locate Visual C for 2017
+ """
+ if self.vc_ver <= 14.0:
+ return
+
+ default = r'VC\Tools\MSVC'
+ guess_vc = os.path.join(self.VSInstallDir, default)
+ # Subdir with VC exact version as name
+ try:
+ vc_exact_ver = os.listdir(guess_vc)[-1]
+ return os.path.join(guess_vc, vc_exact_ver)
+ except (OSError, IOError, IndexError):
+ pass
+
+ def _guess_vc_legacy(self):
+ """
+ Locate Visual C for versions prior to 2017
+ """
+ default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver
+ return os.path.join(self.ProgramFilesx86, default)
+
+ @property
+ def WindowsSdkVersion(self):
+ """
+ Microsoft Windows SDK versions for specified MSVC++ version.
+ """
+ if self.vc_ver <= 9.0:
+ return ('7.0', '6.1', '6.0a')
+ elif self.vc_ver == 10.0:
+ return ('7.1', '7.0a')
+ elif self.vc_ver == 11.0:
+ return ('8.0', '8.0a')
+ elif self.vc_ver == 12.0:
+ return ('8.1', '8.1a')
+ elif self.vc_ver >= 14.0:
+ return ('10.0', '8.1')
+
+ @property
+ def WindowsSdkLastVersion(self):
+ """
+ Latest Microsoft Windows SDK version.
+ """
+ return self._use_last_dir_name(os.path.join(
+ self.WindowsSdkDir, 'lib'))
+
+ @property
+ def WindowsSdkDir(self):
+ """
+ Microsoft Windows SDK directory.
+ """
+ sdkdir = ''
+ for ver in self.WindowsSdkVersion:
+ # Try to get it from registry
+ loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
+ sdkdir = self.ri.lookup(loc, 'installationfolder')
+ if sdkdir:
+ break
+ if not sdkdir or not os.path.isdir(sdkdir):
+ # Try to get "VC++ for Python" version from registry
+ path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+ install_base = self.ri.lookup(path, 'installdir')
+ if install_base:
+ sdkdir = os.path.join(install_base, 'WinSDK')
+ if not sdkdir or not os.path.isdir(sdkdir):
+ # If that fails, use the default new path
+ for ver in self.WindowsSdkVersion:
+ intver = ver[:ver.rfind('.')]
+ path = r'Microsoft SDKs\Windows Kits\%s' % (intver)
+ d = os.path.join(self.ProgramFiles, path)
+ if os.path.isdir(d):
+ sdkdir = d
+ if not sdkdir or not os.path.isdir(sdkdir):
+ # If that fails, use the default old path
+ for ver in self.WindowsSdkVersion:
+ path = r'Microsoft SDKs\Windows\v%s' % ver
+ d = os.path.join(self.ProgramFiles, path)
+ if os.path.isdir(d):
+ sdkdir = d
+ if not sdkdir:
+ # If that fails, use the Platform SDK
+ sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')
+ return sdkdir
+
+ @property
+ def WindowsSDKExecutablePath(self):
+ """
+ Microsoft Windows SDK executable directory.
+ """
+ # Find WinSDK NetFx Tools registry dir name
+ if self.vc_ver <= 11.0:
+ netfxver = 35
+ arch = ''
+ else:
+ netfxver = 40
+ hidex86 = self.vc_ver <= 12.0
+ arch = self.pi.current_dir(x64=True, hidex86=hidex86)
+ fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
+
+ # List all possible registry paths
+ regpaths = []
+ if self.vc_ver >= 14.0:
+ for ver in self.NetFxSdkVersion:
+ regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
+
+ for ver in self.WindowsSdkVersion:
+ regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+
+ # Return installation folder from the most recent path
+ for path in regpaths:
+ execpath = self.ri.lookup(path, 'installationfolder')
+ if execpath:
+ break
+ return execpath
+
+ @property
+ def FSharpInstallDir(self):
+ """
+ Microsoft Visual F# directory.
+ """
+ path = r'%0.1f\Setup\F#' % self.vc_ver
+ path = os.path.join(self.ri.visualstudio, path)
+ return self.ri.lookup(path, 'productdir') or ''
+
+ @property
+ def UniversalCRTSdkDir(self):
+ """
+ Microsoft Universal CRT SDK directory.
+ """
+ # Set Kit Roots versions for specified MSVC++ version
+ if self.vc_ver >= 14.0:
+ vers = ('10', '81')
+ else:
+ vers = ()
+
+ # Find path of the most recent Kit
+ for ver in vers:
+ sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
+ 'kitsroot%s' % ver)
+ if sdkdir:
+ break
+ return sdkdir or ''
+
+ @property
+ def UniversalCRTSdkLastVersion(self):
+ """
+ Latest Microsoft Universal C Runtime SDK version.
+ """
+ return self._use_last_dir_name(os.path.join(
+ self.UniversalCRTSdkDir, 'lib'))
+
+ @property
+ def NetFxSdkVersion(self):
+ """
+ Microsoft .NET Framework SDK versions.
+ """
+ # Set FxSdk versions for specified MSVC++ version
+ if self.vc_ver >= 14.0:
+ return ('4.6.1', '4.6')
+ else:
+ return ()
+
+ @property
+ def NetFxSdkDir(self):
+ """
+ Microsoft .NET Framework SDK directory.
+ """
+ for ver in self.NetFxSdkVersion:
+ loc = os.path.join(self.ri.netfx_sdk, ver)
+ sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
+ if sdkdir:
+ break
+ return sdkdir or ''
+
+ @property
+ def FrameworkDir32(self):
+ """
+ Microsoft .NET Framework 32bit directory.
+ """
+ # Default path
+ guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
+
+ @property
+ def FrameworkDir64(self):
+ """
+ Microsoft .NET Framework 64bit directory.
+ """
+ # Default path
+ guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
+
+ @property
+ def FrameworkVersion32(self):
+ """
+ Microsoft .NET Framework 32bit versions.
+ """
+ return self._find_dot_net_versions(32)
+
+ @property
+ def FrameworkVersion64(self):
+ """
+ Microsoft .NET Framework 64bit versions.
+ """
+ return self._find_dot_net_versions(64)
+
+ def _find_dot_net_versions(self, bits):
+ """
+ Find Microsoft .NET Framework versions.
+
+ Parameters
+ ----------
+ bits: int
+ Platform number of bits: 32 or 64.
+ """
+ # Find actual .NET version in registry
+ reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
+ dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
+ ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
+
+ # Set .NET versions for specified MSVC++ version
+ if self.vc_ver >= 12.0:
+ frameworkver = (ver, 'v4.0')
+ elif self.vc_ver >= 10.0:
+ frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver,
+ 'v3.5')
+ elif self.vc_ver == 9.0:
+ frameworkver = ('v3.5', 'v2.0.50727')
+ elif self.vc_ver == 8.0:
+ frameworkver = ('v3.0', 'v2.0.50727')
+ return frameworkver
+
+ def _use_last_dir_name(self, path, prefix=''):
+ """
+ Return name of the last dir in path or '' if no dir found.
+
+ Parameters
+ ----------
+ path: str
+ Use dirs in this path.
+ prefix: str
+ Use only dirs starting with this prefix.
+ """
+ matching_dirs = (
+ dir_name
+ for dir_name in reversed(os.listdir(path))
+ if os.path.isdir(os.path.join(path, dir_name)) and
+ dir_name.startswith(prefix)
+ )
+ return next(matching_dirs, None) or ''
+
+
+class EnvironmentInfo:
+ """
+ Return environment variables for a specified Microsoft Visual C++
+ version and platform: Lib, Include, Path and libpath.
+
+ This class is compatible with Microsoft Visual C++ 9.0 to 14.0.
+
+ Created by analysing Microsoft environment configuration files such as
+ "vcvars[...].bat", "SetEnv.Cmd" and "vcbuildtools.bat".
+
+ Parameters
+ ----------
+ arch: str
+ Target architecture.
+ vc_ver: float
+ Required Microsoft Visual C++ version. If not set, autodetect the last
+ version.
+ vc_min_ver: float
+ Minimum Microsoft Visual C++ version.
+ """
+
+ # Variables and properties in this class use the original CamelCase
+ # variable names from Microsoft source files for easier comparison.
+
+ def __init__(self, arch, vc_ver=None, vc_min_ver=0):
+ self.pi = PlatformInfo(arch)
+ self.ri = RegistryInfo(self.pi)
+ self.si = SystemInfo(self.ri, vc_ver)
+
+ if self.vc_ver < vc_min_ver:
+ err = 'No suitable Microsoft Visual C++ version found'
+ raise distutils.errors.DistutilsPlatformError(err)
+
+ @property
+ def vc_ver(self):
+ """
+ Microsoft Visual C++ version.
+ """
+ return self.si.vc_ver
+
+ @property
+ def VSTools(self):
+ """
+ Microsoft Visual Studio Tools
+ """
+ paths = [r'Common7\IDE', r'Common7\Tools']
+
+ if self.vc_ver >= 14.0:
+ arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
+ paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
+ paths += [r'Team Tools\Performance Tools']
+ paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
+
+ return [os.path.join(self.si.VSInstallDir, path) for path in paths]
+
+ @property
+ def VCIncludes(self):
+ """
+ Microsoft Visual C++ & Microsoft Foundation Class Includes
+ """
+ return [os.path.join(self.si.VCInstallDir, 'Include'),
+ os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')]
+
+ @property
+ def VCLibraries(self):
+ """
+ Microsoft Visual C++ & Microsoft Foundation Class Libraries
+ """
+ if self.vc_ver >= 15.0:
+ arch_subdir = self.pi.target_dir(x64=True)
+ else:
+ arch_subdir = self.pi.target_dir(hidex86=True)
+ paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
+
+ if self.vc_ver >= 14.0:
+ paths += [r'Lib\store%s' % arch_subdir]
+
+ return [os.path.join(self.si.VCInstallDir, path) for path in paths]
+
+ @property
+ def VCStoreRefs(self):
+ """
+ Microsoft Visual C++ store references Libraries
+ """
+ if self.vc_ver < 14.0:
+ return []
+ return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]
+
+ @property
+ def VCTools(self):
+ """
+ Microsoft Visual C++ Tools
+ """
+ si = self.si
+ tools = [os.path.join(si.VCInstallDir, 'VCPackages')]
+
+ forcex86 = self.vc_ver <= 10.0
+ arch_subdir = self.pi.cross_dir(forcex86)
+ if arch_subdir:
+ tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+
+ if self.vc_ver == 14.0:
+ path = 'Bin%s' % self.pi.current_dir(hidex86=True)
+ tools += [os.path.join(si.VCInstallDir, path)]
+
+ elif self.vc_ver >= 15.0:
+ host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
+ r'bin\HostX64%s')
+ tools += [os.path.join(
+ si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
+
+ if self.pi.current_cpu != self.pi.target_cpu:
+ tools += [os.path.join(
+ si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]
+
+ else:
+ tools += [os.path.join(si.VCInstallDir, 'Bin')]
+
+ return tools
+
+ @property
+ def OSLibraries(self):
+ """
+ Microsoft Windows SDK Libraries
+ """
+ if self.vc_ver <= 10.0:
+ arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
+ return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+
+ else:
+ arch_subdir = self.pi.target_dir(x64=True)
+ lib = os.path.join(self.si.WindowsSdkDir, 'lib')
+ libver = self._sdk_subdir
+ return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
+
+ @property
+ def OSIncludes(self):
+ """
+ Microsoft Windows SDK Include
+ """
+ include = os.path.join(self.si.WindowsSdkDir, 'include')
+
+ if self.vc_ver <= 10.0:
+ return [include, os.path.join(include, 'gl')]
+
+ else:
+ if self.vc_ver >= 14.0:
+ sdkver = self._sdk_subdir
+ else:
+ sdkver = ''
+ return [os.path.join(include, '%sshared' % sdkver),
+ os.path.join(include, '%sum' % sdkver),
+ os.path.join(include, '%swinrt' % sdkver)]
+
+ @property
+ def OSLibpath(self):
+ """
+ Microsoft Windows SDK Libraries Paths
+ """
+ ref = os.path.join(self.si.WindowsSdkDir, 'References')
+ libpath = []
+
+ if self.vc_ver <= 9.0:
+ libpath += self.OSLibraries
+
+ if self.vc_ver >= 11.0:
+ libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]
+
+ if self.vc_ver >= 14.0:
+ libpath += [
+ ref,
+ os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'),
+ os.path.join(
+ ref,
+ 'Windows.Foundation.UniversalApiContract',
+ '1.0.0.0',
+ ),
+ os.path.join(
+ ref,
+ 'Windows.Foundation.FoundationContract',
+ '1.0.0.0',
+ ),
+ os.path.join(
+ ref,
+ 'Windows.Networking.Connectivity.WwanContract',
+ '1.0.0.0',
+ ),
+ os.path.join(
+ self.si.WindowsSdkDir,
+ 'ExtensionSDKs',
+ 'Microsoft.VCLibs',
+ '%0.1f' % self.vc_ver,
+ 'References',
+ 'CommonConfiguration',
+ 'neutral',
+ ),
+ ]
+ return libpath
+
+ @property
+ def SdkTools(self):
+ """
+ Microsoft Windows SDK Tools
+ """
+ return list(self._sdk_tools())
+
+ def _sdk_tools(self):
+ """
+ Microsoft Windows SDK Tools paths generator
+ """
+ if self.vc_ver < 15.0:
+ bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86'
+ yield os.path.join(self.si.WindowsSdkDir, bin_dir)
+
+ if not self.pi.current_is_x86():
+ arch_subdir = self.pi.current_dir(x64=True)
+ path = 'Bin%s' % arch_subdir
+ yield os.path.join(self.si.WindowsSdkDir, path)
+
+ if self.vc_ver == 10.0 or self.vc_ver == 11.0:
+ if self.pi.target_is_x86():
+ arch_subdir = ''
+ else:
+ arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
+ path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
+ yield os.path.join(self.si.WindowsSdkDir, path)
+
+ elif self.vc_ver >= 15.0:
+ path = os.path.join(self.si.WindowsSdkDir, 'Bin')
+ arch_subdir = self.pi.current_dir(x64=True)
+ sdkver = self.si.WindowsSdkLastVersion
+ yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
+
+ if self.si.WindowsSDKExecutablePath:
+ yield self.si.WindowsSDKExecutablePath
+
+ @property
+ def _sdk_subdir(self):
+ """
+ Microsoft Windows SDK version subdir
+ """
+ ucrtver = self.si.WindowsSdkLastVersion
+ return ('%s\\' % ucrtver) if ucrtver else ''
+
+ @property
+ def SdkSetup(self):
+ """
+ Microsoft Windows SDK Setup
+ """
+ if self.vc_ver > 9.0:
+ return []
+
+ return [os.path.join(self.si.WindowsSdkDir, 'Setup')]
+
+ @property
+ def FxTools(self):
+ """
+ Microsoft .NET Framework Tools
+ """
+ pi = self.pi
+ si = self.si
+
+ if self.vc_ver <= 10.0:
+ include32 = True
+ include64 = not pi.target_is_x86() and not pi.current_is_x86()
+ else:
+ include32 = pi.target_is_x86() or pi.current_is_x86()
+ include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'
+
+ tools = []
+ if include32:
+ tools += [os.path.join(si.FrameworkDir32, ver)
+ for ver in si.FrameworkVersion32]
+ if include64:
+ tools += [os.path.join(si.FrameworkDir64, ver)
+ for ver in si.FrameworkVersion64]
+ return tools
+
+ @property
+ def NetFxSDKLibraries(self):
+ """
+ Microsoft .Net Framework SDK Libraries
+ """
+ if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
+ return []
+
+ arch_subdir = self.pi.target_dir(x64=True)
+ return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+
+ @property
+ def NetFxSDKIncludes(self):
+ """
+ Microsoft .Net Framework SDK Includes
+ """
+ if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
+ return []
+
+ return [os.path.join(self.si.NetFxSdkDir, r'include\um')]
+
+ @property
+ def VsTDb(self):
+ """
+ Microsoft Visual Studio Team System Database
+ """
+ return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
+
+ @property
+ def MSBuild(self):
+ """
+ Microsoft Build Engine
+ """
+ if self.vc_ver < 12.0:
+ return []
+ elif self.vc_ver < 15.0:
+ base_path = self.si.ProgramFilesx86
+ arch_subdir = self.pi.current_dir(hidex86=True)
+ else:
+ base_path = self.si.VSInstallDir
+ arch_subdir = ''
+
+ path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir)
+ build = [os.path.join(base_path, path)]
+
+ if self.vc_ver >= 15.0:
+ # Add Roslyn C# & Visual Basic Compiler
+ build += [os.path.join(base_path, path, 'Roslyn')]
+
+ return build
+
+ @property
+ def HTMLHelpWorkshop(self):
+ """
+ Microsoft HTML Help Workshop
+ """
+ if self.vc_ver < 11.0:
+ return []
+
+ return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+
+ @property
+ def UCRTLibraries(self):
+ """
+ Microsoft Universal C Runtime SDK Libraries
+ """
+ if self.vc_ver < 14.0:
+ return []
+
+ arch_subdir = self.pi.target_dir(x64=True)
+ lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
+ ucrtver = self._ucrt_subdir
+ return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+
+ @property
+ def UCRTIncludes(self):
+ """
+ Microsoft Universal C Runtime SDK Include
+ """
+ if self.vc_ver < 14.0:
+ return []
+
+ include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
+ return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
+
+ @property
+ def _ucrt_subdir(self):
+ """
+ Microsoft Universal C Runtime SDK version subdir
+ """
+ ucrtver = self.si.UniversalCRTSdkLastVersion
+ return ('%s\\' % ucrtver) if ucrtver else ''
+
+ @property
+ def FSharp(self):
+ """
+ Microsoft Visual F#
+ """
+ # F# install dir only applies to VC++ 11.0 and 12.0
+ if self.vc_ver < 11.0 or self.vc_ver > 12.0:
+ return []
+
+ return self.si.FSharpInstallDir
+
+ @property
+ def VCRuntimeRedist(self):
+ """
+ Microsoft Visual C++ runtime redistributable DLL.
+ """
+ arch_subdir = self.pi.target_dir(x64=True)
+ if self.vc_ver < 15:
+ redist_path = self.si.VCInstallDir
+ vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'
+ else:
+ redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist')
+ vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'
+
+ # Visual Studio 2017 is still Visual C++ 14.0
+ dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver
+
+ vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver)
+ return os.path.join(redist_path, vcruntime)
+
+ def return_env(self, exists=True):
+ """
+ Return environment dict.
+
+ Parameters
+ ----------
+ exists: bool
+ If True, only return existing paths.
+ """
+ env = dict(
+ include=self._build_paths('include',
+ [self.VCIncludes,
+ self.OSIncludes,
+ self.UCRTIncludes,
+ self.NetFxSDKIncludes],
+ exists),
+ lib=self._build_paths('lib',
+ [self.VCLibraries,
+ self.OSLibraries,
+ self.FxTools,
+ self.UCRTLibraries,
+ self.NetFxSDKLibraries],
+ exists),
+ libpath=self._build_paths('libpath',
+ [self.VCLibraries,
+ self.FxTools,
+ self.VCStoreRefs,
+ self.OSLibpath],
+ exists),
+ path=self._build_paths('path',
+ [self.VCTools,
+ self.VSTools,
+ self.VsTDb,
+ self.SdkTools,
+ self.SdkSetup,
+ self.FxTools,
+ self.MSBuild,
+ self.HTMLHelpWorkshop,
+ self.FSharp],
+ exists),
+ )
+ if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):
+ env['py_vcruntime_redist'] = self.VCRuntimeRedist
+ return env
+
+ def _build_paths(self, name, spec_path_lists, exists):
+ """
+ Given an environment variable name and specified paths,
+ return a pathsep-separated string of paths containing
+ unique, extant, directories from those paths and from
+ the environment variable. Raise an error if no paths
+ are resolved.
+ """
+ # flatten spec_path_lists
+ spec_paths = itertools.chain.from_iterable(spec_path_lists)
+ env_paths = safe_env.get(name, '').split(os.pathsep)
+ paths = itertools.chain(spec_paths, env_paths)
+ extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
+ if not extant_paths:
+ msg = "%s environment variable is empty" % name.upper()
+ raise distutils.errors.DistutilsPlatformError(msg)
+ unique_paths = self._unique_everseen(extant_paths)
+ return os.pathsep.join(unique_paths)
+
+ # from Python docs
+ def _unique_everseen(self, iterable, key=None):
+ """
+ List unique elements, preserving order.
+ Remember all elements ever seen.
+
+ _unique_everseen('AAAABBBCCDAABBB') --> A B C D
+
+ _unique_everseen('ABBCcAD', str.lower) --> A B C D
+ """
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
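+
+
+ # Illustrative sketch (editor's example, not part of the patched module):
+ # EnvironmentInfo is the fallback used by msvc14_get_vc_env() when
+ # vcvarsall.bat is missing; a direct call for a 64-bit target might be:
+ #
+ # env = EnvironmentInfo('x64', vc_min_ver=14.0).return_env()
+ # env['include']  # pathsep-joined VC, Windows SDK, UCRT, NetFx includes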
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
new file mode 100755
index 0000000..dc16106
--- /dev/null
+++ b/setuptools/namespaces.py
@@ -0,0 +1,107 @@
+import os
+from distutils import log
+import itertools
+
+from setuptools.extern.six.moves import map
+
+
+flatten = itertools.chain.from_iterable
+
+
+class Installer:
+
+ nspkg_ext = '-nspkg.pth'
+
+ def install_namespaces(self):
+ nsp = self._get_all_ns_packages()
+ if not nsp:
+ return
+ filename, ext = os.path.splitext(self._get_target())
+ filename += self.nspkg_ext
+ self.outputs.append(filename)
+ log.info("Installing %s", filename)
+ lines = map(self._gen_nspkg_line, nsp)
+
+ if self.dry_run:
+ # always generate the lines, even in dry run
+ list(lines)
+ return
+
+ with open(filename, 'wt') as f:
+ f.writelines(lines)
+
+ def uninstall_namespaces(self):
+ filename, ext = os.path.splitext(self._get_target())
+ filename += self.nspkg_ext
+ if not os.path.exists(filename):
+ return
+ log.info("Removing %s", filename)
+ os.remove(filename)
+
+ def _get_target(self):
+ return self.target
+
+ _nspkg_tmpl = (
+ "import sys, types, os",
+ "has_mfs = sys.version_info > (3, 5)",
+ "p = os.path.join(%(root)s, *%(pth)r)",
+ "importlib = has_mfs and __import__('importlib.util')",
+ "has_mfs and __import__('importlib.machinery')",
+ "m = has_mfs and "
+ "sys.modules.setdefault(%(pkg)r, "
+ "importlib.util.module_from_spec("
+ "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
+ "[os.path.dirname(p)])))",
+ "m = m or "
+ "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+ "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
+ "(p not in mp) and mp.append(p)",
+ )
+ "lines for the namespace installer"
+
+ _nspkg_tmpl_multi = (
+ 'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
+ )
+ "additional line(s) when a parent package is indicated"
+
+ def _get_root(self):
+ return "sys._getframe(1).f_locals['sitedir']"
+
+ def _gen_nspkg_line(self, pkg):
+ # ensure pkg is not a unicode string under Python 2.7
+ pkg = str(pkg)
+ pth = tuple(pkg.split('.'))
+ root = self._get_root()
+ tmpl_lines = self._nspkg_tmpl
+ parent, sep, child = pkg.rpartition('.')
+ if parent:
+ tmpl_lines += self._nspkg_tmpl_multi
+ return ';'.join(tmpl_lines) % locals() + '\n'
+
+ def _get_all_ns_packages(self):
+ """Return sorted list of all package namespaces"""
+ pkgs = self.distribution.namespace_packages or []
+ return sorted(flatten(map(self._pkg_names, pkgs)))
+
+ @staticmethod
+ def _pkg_names(pkg):
+ """
+ Given a namespace package, yield the components of that
+ package.
+
+ >>> names = Installer._pkg_names('a.b.c')
+ >>> set(names) == set(['a', 'a.b', 'a.b.c'])
+ True
+ """
+ parts = pkg.split('.')
+ while parts:
+ yield '.'.join(parts)
+ parts.pop()
+
+
+class DevelopInstaller(Installer):
+ def _get_root(self):
+ return repr(str(self.egg_path))
+
+ def _get_target(self):
+ return self.egg_link
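+
+
+ # Illustrative sketch (editor's example, not part of the patched module):
+ # for namespace_packages=['a.b'], _get_all_ns_packages() returns
+ # ['a', 'a.b'] and each entry becomes one ';'-joined line of the
+ # generated '<target>-nspkg.pth' file, executed by site.py at startup.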
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
new file mode 100755
index 0000000..b6407be
--- /dev/null
+++ b/setuptools/package_index.py
@@ -0,0 +1,1119 @@
+"""PyPI and direct package downloading"""
+import sys
+import os
+import re
+import shutil
+import socket
+import base64
+import hashlib
+import itertools
+from functools import wraps
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import urllib, http_client, configparser, map
+
+import setuptools
+from pkg_resources import (
+ CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
+ Environment, find_distributions, safe_name, safe_version,
+ to_filename, Requirement, DEVELOP_DIST, EGG_DIST,
+)
+from setuptools import ssl_support
+from distutils import log
+from distutils.errors import DistutilsError
+from fnmatch import translate
+from setuptools.py27compat import get_all_headers
+from setuptools.py33compat import unescape
+from setuptools.wheel import Wheel
+
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+# this is here to fix emacs' cruddy broken syntax highlighting
+PYPI_MD5 = re.compile(
+ '<a href="([^"#]+)">([^<]+)</a>\n\\s+\\(<a (?:title="MD5 hash"\n\\s+)'
+ 'href="[^?]+\\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
+)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
+
+__all__ = [
+ 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
+ 'interpret_distro_name',
+]
+
+_SOCKET_TIMEOUT = 15
+
+_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
+user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools)
+
+
+def parse_requirement_arg(spec):
+ try:
+ return Requirement.parse(spec)
+ except ValueError:
+ raise DistutilsError(
+ "Not a URL, existing file, or requirement spec: %r" % (spec,)
+ )
+
+
+def parse_bdist_wininst(name):
+ """Return (base,pyversion) or (None,None) for possible .exe name"""
+
+ lower = name.lower()
+ base, py_ver, plat = None, None, None
+
+ if lower.endswith('.exe'):
+ if lower.endswith('.win32.exe'):
+ base = name[:-10]
+ plat = 'win32'
+ elif lower.startswith('.win32-py', -16):
+ py_ver = name[-7:-4]
+ base = name[:-16]
+ plat = 'win32'
+ elif lower.endswith('.win-amd64.exe'):
+ base = name[:-14]
+ plat = 'win-amd64'
+ elif lower.startswith('.win-amd64-py', -20):
+ py_ver = name[-7:-4]
+ base = name[:-20]
+ plat = 'win-amd64'
+ return base, py_ver, plat
+
+
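+ # Illustrative sketch (editor's example, not part of the patched module):
+ #
+ # parse_bdist_wininst('foo-1.0.win32.exe') -> ('foo-1.0', None, 'win32')
+ # parse_bdist_wininst('foo-1.0.win32-py2.7.exe') -> ('foo-1.0', '2.7', 'win32')
+ # parse_bdist_wininst('foo-1.0.tar.gz') -> (None, None, None)
+
+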
+def egg_info_for_url(url):
+ parts = urllib.parse.urlparse(url)
+ scheme, server, path, parameters, query, fragment = parts
+ base = urllib.parse.unquote(path.split('/')[-1])
+ if server == 'sourceforge.net' and base == 'download': # XXX Yuck
+ base = urllib.parse.unquote(path.split('/')[-2])
+ if '#' in base:
+ base, fragment = base.split('#', 1)
+ return base, fragment
+
+
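+ # Illustrative sketch (editor's example, not part of the patched module):
+ #
+ # egg_info_for_url('https://host/dl/foo-1.0.zip#egg=foo-1.0')
+ # -> ('foo-1.0.zip', 'egg=foo-1.0')
+
+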
+def distros_for_url(url, metadata=None):
+ """Yield egg or source distribution objects that might be found at a URL"""
+ base, fragment = egg_info_for_url(url)
+ for dist in distros_for_location(url, base, metadata):
+ yield dist
+ if fragment:
+ match = EGG_FRAGMENT.match(fragment)
+ if match:
+ for dist in interpret_distro_name(
+ url, match.group(1), metadata, precedence=CHECKOUT_DIST
+ ):
+ yield dist
+
+
+def distros_for_location(location, basename, metadata=None):
+ """Yield egg or source distribution objects based on basename"""
+ if basename.endswith('.egg.zip'):
+ basename = basename[:-4] # strip the .zip
+ if basename.endswith('.egg') and '-' in basename:
+ # only one, unambiguous interpretation
+ return [Distribution.from_location(location, basename, metadata)]
+ if basename.endswith('.whl') and '-' in basename:
+ wheel = Wheel(basename)
+ if not wheel.is_compatible():
+ return []
+ return [Distribution(
+ location=location,
+ project_name=wheel.project_name,
+ version=wheel.version,
+ # Increase priority over eggs.
+ precedence=EGG_DIST + 1,
+ )]
+ if basename.endswith('.exe'):
+ win_base, py_ver, platform = parse_bdist_wininst(basename)
+ if win_base is not None:
+ return interpret_distro_name(
+ location, win_base, metadata, py_ver, BINARY_DIST, platform
+ )
+ # Try source distro extensions (.zip, .tgz, etc.)
+ #
+ for ext in EXTENSIONS:
+ if basename.endswith(ext):
+ basename = basename[:-len(ext)]
+ return interpret_distro_name(location, basename, metadata)
+ return [] # no extension matched
+
+
+def distros_for_filename(filename, metadata=None):
+ """Yield possible egg or source distribution objects based on a filename"""
+ return distros_for_location(
+ normalize_path(filename), os.path.basename(filename), metadata
+ )
+
+
+def interpret_distro_name(
+ location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
+ platform=None
+):
+ """Generate alternative interpretations of a source distro name
+
+ Note: if `location` is a filesystem filename, you should call
+ ``pkg_resources.normalize_path()`` on it before passing it to this
+ routine!
+ """
+ # Generate alternative interpretations of a source distro name
+ # Because some packages are ambiguous as to name/versions split
+ # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
+ # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
+ # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
+ # the spurious interpretations should be ignored, because in the event
+ # there's also an "adns" package, the spurious "python-1.1.0" version will
+ # compare lower than any numeric version number, and is therefore unlikely
+ # to match a request for it. It's still a potential problem, though, and
+ # in the long run PyPI and the distutils should go for "safe" names and
+ # versions in distribution archive names (sdist and bdist).
+
+ parts = basename.split('-')
+ if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
+ # it is a bdist_dumb, not an sdist -- bail out
+ return
+
+ for p in range(1, len(parts) + 1):
+ yield Distribution(
+ location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
+ py_version=py_version, precedence=precedence,
+ platform=platform
+ )
+
+
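+ # Illustrative sketch (editor's example, not part of the patched module):
+ # for basename 'adns-python-1.1.0' the generator yields the name/version
+ # splits ('adns', 'python-1.1.0'), ('adns-python', '1.1.0') and
+ # ('adns-python-1.1.0', '') as Distribution objects.
+
+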
+# From Python 2.7 docs
+def unique_everseen(iterable, key=None):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in six.moves.filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
+
+
+def unique_values(func):
+ """
+ Wrap a function returning an iterable such that the resulting iterable
+ only ever yields unique items.
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return unique_everseen(func(*args, **kwargs))
+
+ return wrapper
+
+
+REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+# this line is here to fix emacs' cruddy broken syntax highlighting
+
+
+@unique_values
+def find_external_links(url, page):
+ """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
+
+ for match in REL.finditer(page):
+ tag, rel = match.groups()
+ rels = set(map(str.strip, rel.lower().split(',')))
+ if 'homepage' in rels or 'download' in rels:
+ for match in HREF.finditer(tag):
+ yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
+
+ for tag in ("<th>Home Page", "<th>Download URL"):
+ pos = page.find(tag)
+ if pos != -1:
+ match = HREF.search(page, pos)
+ if match:
+ yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
+
+
+class ContentChecker(object):
+ """
+ A null content checker that defines the interface for checking content
+ """
+
+ def feed(self, block):
+ """
+ Feed a block of data to the hash.
+ """
+ return
+
+ def is_valid(self):
+ """
+ Check the hash. Return False if validation fails.
+ """
+ return True
+
+ def report(self, reporter, template):
+ """
+ Call reporter with information about the checker (hash name)
+ substituted into the template.
+ """
+ return
+
+
+class HashChecker(ContentChecker):
+ pattern = re.compile(
+ r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
+ r'(?P<expected>[a-f0-9]+)'
+ )
+
+ def __init__(self, hash_name, expected):
+ self.hash_name = hash_name
+ self.hash = hashlib.new(hash_name)
+ self.expected = expected
+
+ @classmethod
+ def from_url(cls, url):
+ "Construct a (possibly null) ContentChecker from a URL"
+ fragment = urllib.parse.urlparse(url)[-1]
+ if not fragment:
+ return ContentChecker()
+ match = cls.pattern.search(fragment)
+ if not match:
+ return ContentChecker()
+ return cls(**match.groupdict())
+
+ def feed(self, block):
+ self.hash.update(block)
+
+ def is_valid(self):
+ return self.hash.hexdigest() == self.expected
+
+ def report(self, reporter, template):
+ msg = template % self.hash_name
+ return reporter(msg)
+
+
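+ # Illustrative sketch (editor's example, not part of the patched module):
+ #
+ # checker = HashChecker.from_url(
+ # 'https://host/foo.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e')
+ # checker.feed(b'')   # feed each downloaded block
+ # checker.is_valid()  # True: the MD5 digest of empty input matches
+
+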
+class PackageIndex(Environment):
+ """A distribution index that scans web pages for download URLs"""
+
+ def __init__(
+ self, index_url="https://pypi.org/simple/", hosts=('*',),
+ ca_bundle=None, verify_ssl=True, *args, **kw
+ ):
+ Environment.__init__(self, *args, **kw)
+ # Ensure the index URL ends with a trailing slash
+ self.index_url = index_url if index_url.endswith('/') else index_url + '/'
+ self.scanned_urls = {}
+ self.fetched_urls = {}
+ self.package_pages = {}
+ self.allows = re.compile('|'.join(map(translate, hosts))).match
+ self.to_scan = []
+ use_ssl = (
+ verify_ssl
+ and ssl_support.is_available
+ and (ca_bundle or ssl_support.find_ca_bundle())
+ )
+ if use_ssl:
+ self.opener = ssl_support.opener_for(ca_bundle)
+ else:
+ self.opener = urllib.request.urlopen
+
+ def process_url(self, url, retrieve=False):
+ """Evaluate a URL as a possible download, and maybe retrieve it"""
+ if url in self.scanned_urls and not retrieve:
+ return
+ self.scanned_urls[url] = True
+ if not URL_SCHEME(url):
+ self.process_filename(url)
+ return
+ else:
+ dists = list(distros_for_url(url))
+ if dists:
+ if not self.url_ok(url):
+ return
+ self.debug("Found link: %s", url)
+
+ if dists or not retrieve or url in self.fetched_urls:
+ list(map(self.add, dists))
+ return # don't need the actual page
+
+ if not self.url_ok(url):
+ self.fetched_urls[url] = True
+ return
+
+ self.info("Reading %s", url)
+ self.fetched_urls[url] = True # prevent multiple fetch attempts
+ tmpl = "Download error on %s: %%s -- Some packages may not be found!"
+ f = self.open_url(url, tmpl % url)
+ if f is None:
+ return
+ self.fetched_urls[f.url] = True
+ if 'html' not in f.headers.get('content-type', '').lower():
+ f.close() # not html, we can't process it
+ return
+
+ base = f.url # handle redirects
+ page = f.read()
+ if not isinstance(page, str):
+ # We are on Python 3 and got bytes, but we want str.
+ if isinstance(f, urllib.error.HTTPError):
+ # Errors have no charset, assume latin1:
+ charset = 'latin-1'
+ else:
+ charset = f.headers.get_param('charset') or 'latin-1'
+ page = page.decode(charset, "ignore")
+ f.close()
+ for match in HREF.finditer(page):
+ link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
+ self.process_url(link)
+ if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
+ page = self.process_index(url, page)
+
+ def process_filename(self, fn, nested=False):
+ # process filenames or directories
+ if not os.path.exists(fn):
+ self.warn("Not found: %s", fn)
+ return
+
+ if os.path.isdir(fn) and not nested:
+ path = os.path.realpath(fn)
+ for item in os.listdir(path):
+ self.process_filename(os.path.join(path, item), True)
+
+ dists = distros_for_filename(fn)
+ if dists:
+ self.debug("Found: %s", fn)
+ list(map(self.add, dists))
+
+ def url_ok(self, url, fatal=False):
+ s = URL_SCHEME(url)
+ is_file = s and s.group(1).lower() == 'file'
+ if is_file or self.allows(urllib.parse.urlparse(url)[1]):
+ return True
+ msg = (
+ "\nNote: Bypassing %s (disallowed host; see "
+ "http://bit.ly/2hrImnY for details).\n")
+ if fatal:
+ raise DistutilsError(msg % url)
+ else:
+ self.warn(msg, url)
+
+ def scan_egg_links(self, search_path):
+ dirs = filter(os.path.isdir, search_path)
+ egg_links = (
+ (path, entry)
+ for path in dirs
+ for entry in os.listdir(path)
+ if entry.endswith('.egg-link')
+ )
+ list(itertools.starmap(self.scan_egg_link, egg_links))
+
+ def scan_egg_link(self, path, entry):
+ with open(os.path.join(path, entry)) as raw_lines:
+ # filter non-empty lines
+ lines = list(filter(None, map(str.strip, raw_lines)))
+
+ if len(lines) != 2:
+ # format is not recognized; punt
+ return
+
+ egg_path, setup_path = lines
+
+ for dist in find_distributions(os.path.join(path, egg_path)):
+ dist.location = os.path.join(path, *lines)
+ dist.precedence = SOURCE_DIST
+ self.add(dist)
+
+ def process_index(self, url, page):
+ """Process the contents of a PyPI page"""
+
+ def scan(link):
+ # Process a URL to see if it's for a package page
+ if link.startswith(self.index_url):
+ parts = list(map(
+ urllib.parse.unquote, link[len(self.index_url):].split('/')
+ ))
+ if len(parts) == 2 and '#' not in parts[1]:
+ # it's a package page, sanitize and index it
+ pkg = safe_name(parts[0])
+ ver = safe_version(parts[1])
+ self.package_pages.setdefault(pkg.lower(), {})[link] = True
+ return to_filename(pkg), to_filename(ver)
+ return None, None
+
+ # process an index page into the package-page index
+ for match in HREF.finditer(page):
+ try:
+ scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
+ except ValueError:
+ pass
+
+ pkg, ver = scan(url) # ensure this page is in the page index
+ if pkg:
+ # process individual package page
+ for new_url in find_external_links(url, page):
+ # Process the found URL
+ base, frag = egg_info_for_url(new_url)
+ if base.endswith('.py') and not frag:
+ if ver:
+ new_url += '#egg=%s-%s' % (pkg, ver)
+ else:
+ self.need_version_info(url)
+ self.scan_url(new_url)
+
+ return PYPI_MD5.sub(
+ lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+ )
+ else:
+ return "" # no sense double-scanning non-package pages
+
+ def need_version_info(self, url):
+ self.scan_all(
+ "Page at %s links to .py file(s) without version info; an index "
+ "scan is required.", url
+ )
+
+ def scan_all(self, msg=None, *args):
+ if self.index_url not in self.fetched_urls:
+ if msg:
+ self.warn(msg, *args)
+ self.info(
+ "Scanning index of all packages (this may take a while)"
+ )
+ self.scan_url(self.index_url)
+
+ def find_packages(self, requirement):
+ self.scan_url(self.index_url + requirement.unsafe_name + '/')
+
+ if not self.package_pages.get(requirement.key):
+ # Fall back to safe version of the name
+ self.scan_url(self.index_url + requirement.project_name + '/')
+
+ if not self.package_pages.get(requirement.key):
+ # We couldn't find the target package, so search the index page too
+ self.not_found_in_index(requirement)
+
+ for url in list(self.package_pages.get(requirement.key, ())):
+ # scan each page that might be related to the desired package
+ self.scan_url(url)
+
+ def obtain(self, requirement, installer=None):
+ self.prescan()
+ self.find_packages(requirement)
+ for dist in self[requirement.key]:
+ if dist in requirement:
+ return dist
+ self.debug("%s does not match %s", requirement, dist)
+ return super(PackageIndex, self).obtain(requirement, installer)
+
+ def check_hash(self, checker, filename, tfp):
+ """
+ checker is a ContentChecker
+ """
+ checker.report(
+ self.debug,
+ "Validating %%s checksum for %s" % filename)
+ if not checker.is_valid():
+ tfp.close()
+ os.unlink(filename)
+ raise DistutilsError(
+ "%s validation failed for %s; "
+ "possible download problem?"
+ % (checker.hash.name, os.path.basename(filename))
+ )
+
+ def add_find_links(self, urls):
+ """Add `urls` to the list that will be prescanned for searches"""
+ for url in urls:
+ if (
+ self.to_scan is None # if we have already "gone online"
+ or not URL_SCHEME(url) # or it's a local file/directory
+ or url.startswith('file:')
+ or list(distros_for_url(url)) # or a direct package link
+ ):
+ # then go ahead and process it now
+ self.scan_url(url)
+ else:
+ # otherwise, defer retrieval till later
+ self.to_scan.append(url)
+
+ def prescan(self):
+ """Scan urls scheduled for prescanning (e.g. --find-links)"""
+ if self.to_scan:
+ list(map(self.scan_url, self.to_scan))
+ self.to_scan = None # from now on, go ahead and process immediately
+
+ def not_found_in_index(self, requirement):
+ if self[requirement.key]: # we've seen at least one distro
+ meth, msg = self.info, "Couldn't retrieve index page for %r"
+ else: # no distros seen for this name, might be misspelled
+ meth, msg = (
+ self.warn,
+ "Couldn't find index page for %r (maybe misspelled?)")
+ meth(msg, requirement.unsafe_name)
+ self.scan_all()
+
+ def download(self, spec, tmpdir):
+ """Locate and/or download `spec` to `tmpdir`, returning a local path
+
+ `spec` may be a ``Requirement`` object, or a string containing a URL,
+ an existing local filename, or a project/version requirement spec
+ (i.e. the string form of a ``Requirement`` object). If it is the URL
+ of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
+ that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
+ automatically created alongside the downloaded file.
+
+ If `spec` is a ``Requirement`` object or a string containing a
+ project/version requirement spec, this method returns the location of
+ a matching distribution (possibly after downloading it to `tmpdir`).
+ If `spec` is a locally existing file or directory name, it is simply
+ returned unchanged. If `spec` is a URL, it is downloaded to a subpath
+ of `tmpdir`, and the local filename is returned. Various errors may be
+ raised if a problem occurs during downloading.
+ """
+ if not isinstance(spec, Requirement):
+ scheme = URL_SCHEME(spec)
+ if scheme:
+ # It's a url, download it to tmpdir
+ found = self._download_url(scheme.group(1), spec, tmpdir)
+ base, fragment = egg_info_for_url(spec)
+ if base.endswith('.py'):
+ found = self.gen_setup(found, fragment, tmpdir)
+ return found
+ elif os.path.exists(spec):
+ # Existing file or directory, just return it
+ return spec
+ else:
+ spec = parse_requirement_arg(spec)
+ return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
+
+ def fetch_distribution(
+ self, requirement, tmpdir, force_scan=False, source=False,
+ develop_ok=False, local_index=None):
+ """Obtain a distribution suitable for fulfilling `requirement`
+
+ `requirement` must be a ``pkg_resources.Requirement`` instance.
+ If necessary, or if the `force_scan` flag is set, the requirement is
+ searched for in the (online) package index as well as the locally
+ installed packages. If a distribution matching `requirement` is found,
+ the returned distribution's ``location`` is the value you would have
+ gotten from calling the ``download()`` method with the matching
+ distribution's URL or filename. If no matching distribution is found,
+ ``None`` is returned.
+
+ If the `source` flag is set, only source distributions and source
+ checkout links will be considered. Unless the `develop_ok` flag is
+ set, development and system eggs (i.e., those using the ``.egg-info``
+ format) will be ignored.
+ """
+ # process a Requirement
+ self.info("Searching for %s", requirement)
+ skipped = {}
+ dist = None
+
+ def find(req, env=None):
+ if env is None:
+ env = self
+ # Find a matching distribution; may be called more than once
+
+ for dist in env[req.key]:
+
+ if dist.precedence == DEVELOP_DIST and not develop_ok:
+ if dist not in skipped:
+ self.warn(
+ "Skipping development or system egg: %s", dist,
+ )
+ skipped[dist] = 1
+ continue
+
+ test = (
+ dist in req
+ and (dist.precedence <= SOURCE_DIST or not source)
+ )
+ if test:
+ loc = self.download(dist.location, tmpdir)
+ dist.download_location = loc
+ if os.path.exists(dist.download_location):
+ return dist
+
+ if force_scan:
+ self.prescan()
+ self.find_packages(requirement)
+ dist = find(requirement)
+
+ if not dist and local_index is not None:
+ dist = find(requirement, local_index)
+
+ if dist is None:
+ if self.to_scan is not None:
+ self.prescan()
+ dist = find(requirement)
+
+ if dist is None and not force_scan:
+ self.find_packages(requirement)
+ dist = find(requirement)
+
+ if dist is None:
+ self.warn(
+ "No local packages or working download links found for %s%s",
+ (source and "a source distribution of " or ""),
+ requirement,
+ )
+ else:
+ self.info("Best match: %s", dist)
+ return dist.clone(location=dist.download_location)
+
+ def fetch(self, requirement, tmpdir, force_scan=False, source=False):
+ """Obtain a file suitable for fulfilling `requirement`
+
+ DEPRECATED; use the ``fetch_distribution()`` method now instead. For
+ backward compatibility, this routine is identical but returns the
+ ``location`` of the downloaded distribution instead of a distribution
+ object.
+ """
+ dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)
+ if dist is not None:
+ return dist.location
+ return None
+
+ def gen_setup(self, filename, fragment, tmpdir):
+ match = EGG_FRAGMENT.match(fragment)
+ dists = match and [
+ d for d in
+ interpret_distro_name(filename, match.group(1), None) if d.version
+ ] or []
+
+ if len(dists) == 1: # unambiguous ``#egg`` fragment
+ basename = os.path.basename(filename)
+
+ # Make sure the file has been downloaded to the temp dir.
+ if os.path.dirname(filename) != tmpdir:
+ dst = os.path.join(tmpdir, basename)
+ from setuptools.command.easy_install import samefile
+ if not samefile(filename, dst):
+ shutil.copy2(filename, dst)
+ filename = dst
+
+ with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
+ file.write(
+ "from setuptools import setup\n"
+ "setup(name=%r, version=%r, py_modules=[%r])\n"
+ % (
+ dists[0].project_name, dists[0].version,
+ os.path.splitext(basename)[0]
+ )
+ )
+ return filename
+
+ elif match:
+ raise DistutilsError(
+ "Can't unambiguously interpret project/version identifier %r; "
+ "any dashes in the name or version should be escaped using "
+ "underscores. %r" % (fragment, dists)
+ )
+ else:
+ raise DistutilsError(
+ "Can't process plain .py files without an '#egg=name-version'"
+ " suffix to enable automatic setup script generation."
+ )
+
+ dl_blocksize = 8192
+
+ def _download_to(self, url, filename):
+ self.info("Downloading %s", url)
+ # Download the file
+ fp = None
+ try:
+ checker = HashChecker.from_url(url)
+ fp = self.open_url(url)
+ if isinstance(fp, urllib.error.HTTPError):
+ raise DistutilsError(
+ "Can't download %s: %s %s" % (url, fp.code, fp.msg)
+ )
+ headers = fp.info()
+ blocknum = 0
+ bs = self.dl_blocksize
+ size = -1
+ if "content-length" in headers:
+ # Some servers return multiple Content-Length headers :(
+ sizes = get_all_headers(headers, 'Content-Length')
+ size = max(map(int, sizes))
+ self.reporthook(url, filename, blocknum, bs, size)
+ with open(filename, 'wb') as tfp:
+ while True:
+ block = fp.read(bs)
+ if block:
+ checker.feed(block)
+ tfp.write(block)
+ blocknum += 1
+ self.reporthook(url, filename, blocknum, bs, size)
+ else:
+ break
+ self.check_hash(checker, filename, tfp)
+ return headers
+ finally:
+ if fp:
+ fp.close()
+
+ def reporthook(self, url, filename, blocknum, blksize, size):
+ pass # no-op
+
+ def open_url(self, url, warning=None):
+ if url.startswith('file:'):
+ return local_open(url)
+ try:
+ return open_with_auth(url, self.opener)
+ except (ValueError, http_client.InvalidURL) as v:
+ msg = ' '.join([str(arg) for arg in v.args])
+ if warning:
+ self.warn(warning, msg)
+ else:
+ raise DistutilsError('%s %s' % (url, msg))
+ except urllib.error.HTTPError as v:
+ return v
+ except urllib.error.URLError as v:
+ if warning:
+ self.warn(warning, v.reason)
+ else:
+ raise DistutilsError("Download error for %s: %s"
+ % (url, v.reason))
+ except http_client.BadStatusLine as v:
+ if warning:
+ self.warn(warning, v.line)
+ else:
+ raise DistutilsError(
+ '%s returned a bad status line. The server might be '
+ 'down, %s' %
+ (url, v.line)
+ )
+ except (http_client.HTTPException, socket.error) as v:
+ if warning:
+ self.warn(warning, v)
+ else:
+ raise DistutilsError("Download error for %s: %s"
+ % (url, v))
+
+ def _download_url(self, scheme, url, tmpdir):
+ # Determine download filename
+ #
+ name, fragment = egg_info_for_url(url)
+ if name:
+ while '..' in name:
+ name = name.replace('..', '.').replace('\\', '_')
+ else:
+ name = "__downloaded__" # default if URL has no path contents
+
+ if name.endswith('.egg.zip'):
+ name = name[:-4] # strip the extra .zip before download
+
+ filename = os.path.join(tmpdir, name)
+
+ # Download the file
+ #
+ if scheme == 'svn' or scheme.startswith('svn+'):
+ return self._download_svn(url, filename)
+ elif scheme == 'git' or scheme.startswith('git+'):
+ return self._download_git(url, filename)
+ elif scheme.startswith('hg+'):
+ return self._download_hg(url, filename)
+ elif scheme == 'file':
+ return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
+ else:
+ self.url_ok(url, True) # raises error if not allowed
+ return self._attempt_download(url, filename)
+
+ def scan_url(self, url):
+ self.process_url(url, True)
+
+ def _attempt_download(self, url, filename):
+ headers = self._download_to(url, filename)
+ if 'html' in headers.get('content-type', '').lower():
+ return self._download_html(url, headers, filename)
+ else:
+ return filename
+
+ def _download_html(self, url, headers, filename):
+ file = open(filename)
+ for line in file:
+ if line.strip():
+ # Check for a subversion index page
+ if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
+ # it's a subversion index page:
+ file.close()
+ os.unlink(filename)
+ return self._download_svn(url, filename)
+ break # not an index page
+ file.close()
+ os.unlink(filename)
+ raise DistutilsError("Unexpected HTML page found at " + url)
+
+ def _download_svn(self, url, filename):
+ url = url.split('#', 1)[0] # remove any fragment for svn's sake
+ creds = ''
+ if url.lower().startswith('svn:') and '@' in url:
+ scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
+ if not netloc and path.startswith('//') and '/' in path[2:]:
+ netloc, path = path[2:].split('/', 1)
+ auth, host = urllib.parse.splituser(netloc)
+ if auth:
+ if ':' in auth:
+ user, pw = auth.split(':', 1)
+ creds = " --username=%s --password=%s" % (user, pw)
+ else:
+ creds = " --username=" + auth
+ netloc = host
+ parts = scheme, netloc, url, p, q, f
+ url = urllib.parse.urlunparse(parts)
+ self.info("Doing subversion checkout from %s to %s", url, filename)
+ os.system("svn checkout%s -q %s %s" % (creds, url, filename))
+ return filename
+
+ @staticmethod
+ def _vcs_split_rev_from_url(url, pop_prefix=False):
+ scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
+
+ scheme = scheme.split('+', 1)[-1]
+
+ # Strip any fragment that survived into the path
+ path = path.split('#', 1)[0]
+
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+
+ # Also, discard fragment
+ url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
+
+ return url, rev
+
+ def _download_git(self, url, filename):
+ filename = filename.split('#', 1)[0]
+ url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+
+ self.info("Doing git clone from %s to %s", url, filename)
+ os.system("git clone --quiet %s %s" % (url, filename))
+
+ if rev is not None:
+ self.info("Checking out %s", rev)
+ os.system("(cd %s && git checkout --quiet %s)" % (
+ filename,
+ rev,
+ ))
+
+ return filename
+
+ def _download_hg(self, url, filename):
+ filename = filename.split('#', 1)[0]
+ url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+
+ self.info("Doing hg clone from %s to %s", url, filename)
+ os.system("hg clone --quiet %s %s" % (url, filename))
+
+ if rev is not None:
+ self.info("Updating to %s", rev)
+ os.system("(cd %s && hg up -C -r %s -q)" % (
+ filename,
+ rev,
+ ))
+
+ return filename
+
+ def debug(self, msg, *args):
+ log.debug(msg, *args)
+
+ def info(self, msg, *args):
+ log.info(msg, *args)
+
+ def warn(self, msg, *args):
+ log.warn(msg, *args)
+
+
+ # This pattern matches a character entity reference (a decimal numeric
+ # reference, a hexadecimal numeric reference, or a named reference).
+entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+
+
+def decode_entity(match):
+    # pass the full match (e.g. '&lt;'); unescape() expects the '&...;' form
+    what = match.group(0)
+    return unescape(what)
+
+
+def htmldecode(text):
+ """Decode HTML entities in the given text."""
+ return entity_sub(decode_entity, text)
+
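+# Illustrative sketch (editor's example, not from upstream): htmldecode
+# resolves named, decimal, and hexadecimal references alike, e.g.
+#   htmldecode('&lt;b&gt; &#38; &#x26;')  ->  '<b> & &'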
+
+def socket_timeout(timeout=15):
+ def _socket_timeout(func):
+ def _socket_timeout(*args, **kwargs):
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(timeout)
+ try:
+ return func(*args, **kwargs)
+ finally:
+ socket.setdefaulttimeout(old_timeout)
+
+ return _socket_timeout
+
+ return _socket_timeout
+
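+# Illustrative sketch (editor's example): socket_timeout is a decorator
+# factory; it can wrap any network helper so the default socket timeout is
+# set for the duration of the call and restored afterwards:
+#   fetch = socket_timeout(30)(urllib.request.urlopen)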
+
+def _encode_auth(auth):
+ """
+    Encode the percent-quoted 'user:password' auth portion of a URL into a
+    base64 value suitable for an HTTP Basic Authorization header.
+    Compatible with Python 2.3 through 3.3.
+ >>> str(_encode_auth('username%3Apassword'))
+ 'dXNlcm5hbWU6cGFzc3dvcmQ='
+
+ Long auth strings should not cause a newline to be inserted.
+ >>> long_auth = 'username:' + 'password'*10
+ >>> chr(10) in str(_encode_auth(long_auth))
+ False
+ """
+ auth_s = urllib.parse.unquote(auth)
+ # convert to bytes
+ auth_bytes = auth_s.encode()
+ # use the legacy interface for Python 2.3 support
+ encoded_bytes = base64.encodestring(auth_bytes)
+ # convert back to a string
+ encoded = encoded_bytes.decode()
+    # strip the newlines that base64.encodestring inserts
+ return encoded.replace('\n', '')
+
+
+class Credential(object):
+ """
+ A username/password pair. Use like a namedtuple.
+ """
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __iter__(self):
+ yield self.username
+ yield self.password
+
+ def __str__(self):
+ return '%(username)s:%(password)s' % vars(self)
+
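+# Illustrative sketch (hypothetical values; never called by setuptools):
+def _credential_demo():
+    user, pw = Credential('alice', 'secret')  # tuple-style unpacking
+    return str(Credential(user, pw))  # -> 'alice:secret'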
+
+class PyPIConfig(configparser.RawConfigParser):
+ def __init__(self):
+ """
+ Load from ~/.pypirc
+ """
+ defaults = dict.fromkeys(['username', 'password', 'repository'], '')
+ configparser.RawConfigParser.__init__(self, defaults)
+
+ rc = os.path.join(os.path.expanduser('~'), '.pypirc')
+ if os.path.exists(rc):
+ self.read(rc)
+
+ @property
+ def creds_by_repository(self):
+ sections_with_repositories = [
+ section for section in self.sections()
+ if self.get(section, 'repository').strip()
+ ]
+
+ return dict(map(self._get_repo_cred, sections_with_repositories))
+
+ def _get_repo_cred(self, section):
+ repo = self.get(section, 'repository').strip()
+ return repo, Credential(
+ self.get(section, 'username').strip(),
+ self.get(section, 'password').strip(),
+ )
+
+ def find_credential(self, url):
+ """
+ If the URL indicated appears to be a repository defined in this
+ config, return the credential for that repository.
+ """
+ for repository, cred in self.creds_by_repository.items():
+ if url.startswith(repository):
+ return cred
+
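+# Illustrative sketch (hypothetical URL; never called by setuptools):
+def _pypirc_demo():
+    cred = PyPIConfig().find_credential('https://example.org/simple/')
+    return str(cred) if cred else None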
+
+def open_with_auth(url, opener=urllib.request.urlopen):
+ """Open a urllib2 request, handling HTTP authentication"""
+
+ scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+
+ # Double scheme does not raise on Mac OS X as revealed by a
+ # failing test. We would expect "nonnumeric port". Refs #20.
+ if netloc.endswith(':'):
+ raise http_client.InvalidURL("nonnumeric port: ''")
+
+ if scheme in ('http', 'https'):
+ auth, host = urllib.parse.splituser(netloc)
+ else:
+ auth = None
+
+ if not auth:
+ cred = PyPIConfig().find_credential(url)
+ if cred:
+ auth = str(cred)
+ info = cred.username, url
+ log.info('Authenticating as %s for %s (from .pypirc)', *info)
+
+ if auth:
+ auth = "Basic " + _encode_auth(auth)
+ parts = scheme, host, path, params, query, frag
+ new_url = urllib.parse.urlunparse(parts)
+ request = urllib.request.Request(new_url)
+ request.add_header("Authorization", auth)
+ else:
+ request = urllib.request.Request(url)
+
+ request.add_header('User-Agent', user_agent)
+ fp = opener(request)
+
+ if auth:
+ # Put authentication info back into request URL if same host,
+ # so that links found on the page will work
+ s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
+ if s2 == scheme and h2 == host:
+ parts = s2, netloc, path2, param2, query2, frag2
+ fp.url = urllib.parse.urlunparse(parts)
+
+ return fp
+
+
+# adding a timeout to avoid freezing package_index
+open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
+
+
+def fix_sf_url(url):
+    return url  # no-op; retained for backward compatibility
+
+
+def local_open(url):
+ """Read a local path, with special support for directories"""
+ scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
+ filename = urllib.request.url2pathname(path)
+ if os.path.isfile(filename):
+ return urllib.request.urlopen(url)
+ elif path.endswith('/') and os.path.isdir(filename):
+ files = []
+ for f in os.listdir(filename):
+ filepath = os.path.join(filename, f)
+ if f == 'index.html':
+ with open(filepath, 'r') as fp:
+ body = fp.read()
+ break
+ elif os.path.isdir(filepath):
+ f += '/'
+ files.append('<a href="{name}">{name}</a>'.format(name=f))
+ else:
+ tmpl = (
+ "<html><head><title>{url}</title>"
+ "</head><body>{files}</body></html>")
+ body = tmpl.format(url=url, files='\n'.join(files))
+ status, message = 200, "OK"
+ else:
+ status, message, body = 404, "Path not found", "Not found"
+
+ headers = {'content-type': 'text/html'}
+ body_stream = six.StringIO(body)
+ return urllib.error.HTTPError(url, status, message, headers, body_stream)
diff --git a/setuptools/pep425tags.py b/setuptools/pep425tags.py
new file mode 100644
index 0000000..3bdd328
--- /dev/null
+++ b/setuptools/pep425tags.py
@@ -0,0 +1,317 @@
+# This file originally from pip:
+# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py
+"""Generate and work with PEP 425 Compatibility Tags."""
+from __future__ import absolute_import
+
+import distutils.util
+from distutils import log
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+from collections import OrderedDict
+
+from . import glibc
+
+_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
+
+
+def get_config_var(var):
+ try:
+ return sysconfig.get_config_var(var)
+ except IOError as e: # Issue #1074
+ warnings.warn("{}".format(e), RuntimeWarning)
+ return None
+
+
+def get_abbr_impl():
+ """Return abbreviated implementation name."""
+ if hasattr(sys, 'pypy_version_info'):
+ pyimpl = 'pp'
+ elif sys.platform.startswith('java'):
+ pyimpl = 'jy'
+ elif sys.platform == 'cli':
+ pyimpl = 'ip'
+ else:
+ pyimpl = 'cp'
+ return pyimpl
+
+
+def get_impl_ver():
+ """Return implementation version."""
+ impl_ver = get_config_var("py_version_nodot")
+ if not impl_ver or get_abbr_impl() == 'pp':
+ impl_ver = ''.join(map(str, get_impl_version_info()))
+ return impl_ver
+
+
+def get_impl_version_info():
+ """Return sys.version_info-like tuple for use in decrementing the minor
+ version."""
+ if get_abbr_impl() == 'pp':
+ # as per https://github.com/pypa/pip/issues/2882
+ return (sys.version_info[0], sys.pypy_version_info.major,
+ sys.pypy_version_info.minor)
+ else:
+ return sys.version_info[0], sys.version_info[1]
+
+
+def get_impl_tag():
+ """
+ Returns the Tag for this specific implementation.
+ """
+ return "{}{}".format(get_abbr_impl(), get_impl_ver())
+
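+# For example (interpreter-dependent): CPython 3.6 yields 'cp36', while PyPy
+# combines 'pp' with its own version, e.g. 'pp259' for PyPy 5.9 on Python 2.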
+
+def get_flag(var, fallback, expected=True, warn=True):
+ """Use a fallback method for determining SOABI flags if the needed config
+ var is unset or unavailable."""
+ val = get_config_var(var)
+ if val is None:
+ if warn:
+ log.debug("Config variable '%s' is unset, Python ABI tag may "
+ "be incorrect", var)
+ return fallback()
+ return val == expected
+
+
+def get_abi_tag():
+ """Return the ABI tag based on SOABI (if available) or emulate SOABI
+ (CPython 2, PyPy)."""
+ soabi = get_config_var('SOABI')
+ impl = get_abbr_impl()
+ if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
+ d = ''
+ m = ''
+ u = ''
+ if get_flag('Py_DEBUG',
+ lambda: hasattr(sys, 'gettotalrefcount'),
+ warn=(impl == 'cp')):
+ d = 'd'
+ if get_flag('WITH_PYMALLOC',
+ lambda: impl == 'cp',
+ warn=(impl == 'cp')):
+ m = 'm'
+ if get_flag('Py_UNICODE_SIZE',
+ lambda: sys.maxunicode == 0x10ffff,
+ expected=4,
+ warn=(impl == 'cp' and
+ sys.version_info < (3, 3))) \
+ and sys.version_info < (3, 3):
+ u = 'u'
+ abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
+ elif soabi and soabi.startswith('cpython-'):
+ abi = 'cp' + soabi.split('-')[1]
+ elif soabi:
+ abi = soabi.replace('.', '_').replace('-', '_')
+ else:
+ abi = None
+ return abi
+
+
+def _is_running_32bit():
+ return sys.maxsize == 2147483647
+
+
+def get_platform():
+ """Return our platform name 'win32', 'linux_x86_64'"""
+ if sys.platform == 'darwin':
+ # distutils.util.get_platform() returns the release based on the value
+ # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
+ # be significantly older than the user's current machine.
+ release, _, machine = platform.mac_ver()
+ split_ver = release.split('.')
+
+ if machine == "x86_64" and _is_running_32bit():
+ machine = "i386"
+ elif machine == "ppc64" and _is_running_32bit():
+ machine = "ppc"
+
+ return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
+
+ # XXX remove distutils dependency
+ result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
+ if result == "linux_x86_64" and _is_running_32bit():
+ # 32 bit Python program (running on a 64 bit Linux): pip should only
+ # install and run 32 bit compiled extensions in that case.
+ result = "linux_i686"
+
+ return result
+
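+# Illustrative outputs (machine-dependent): 'linux_x86_64' on 64-bit Linux,
+# 'macosx_10_13_x86_64' on a macOS 10.13 Intel build, 'win32' on 32-bit
+# Windows.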
+
+def is_manylinux1_compatible():
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux1_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 5 uses glibc 2.5.
+ return glibc.have_compatible_glibc(2, 5)
+
+
+def get_darwin_arches(major, minor, machine):
+ """Return a list of supported arches (including group arches) for
+    the given major, minor and machine architecture of a macOS machine.
+ """
+ arches = []
+
+ def _supports_arch(major, minor, arch):
+ # Looking at the application support for macOS versions in the chart
+ # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
+ # our timeline looks roughly like:
+ #
+ # 10.0 - Introduces ppc support.
+ # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
+ # and x86_64 support is CLI only, and cannot be used for GUI
+ # applications.
+ # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
+ # 10.6 - Drops support for ppc64
+ # 10.7 - Drops support for ppc
+ #
+ # Given that we do not know if we're installing a CLI or a GUI
+ # application, we must be conservative and assume it might be a GUI
+ # application and behave as if ppc64 and x86_64 support did not occur
+ # until 10.5.
+ #
+ # Note: The above information is taken from the "Application support"
+ # column in the chart not the "Processor support" since I believe
+ # that we care about what instruction sets an application can use
+ # not which processors the OS supports.
+ if arch == 'ppc':
+ return (major, minor) <= (10, 5)
+ if arch == 'ppc64':
+ return (major, minor) == (10, 5)
+ if arch == 'i386':
+ return (major, minor) >= (10, 4)
+ if arch == 'x86_64':
+ return (major, minor) >= (10, 5)
+ if arch in groups:
+ for garch in groups[arch]:
+ if _supports_arch(major, minor, garch):
+ return True
+ return False
+
+ groups = OrderedDict([
+ ("fat", ("i386", "ppc")),
+ ("intel", ("x86_64", "i386")),
+ ("fat64", ("x86_64", "ppc64")),
+ ("fat32", ("x86_64", "i386", "ppc")),
+ ])
+
+ if _supports_arch(major, minor, machine):
+ arches.append(machine)
+
+ for garch in groups:
+ if machine in groups[garch] and _supports_arch(major, minor, garch):
+ arches.append(garch)
+
+ arches.append('universal')
+
+ return arches
+
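+# Illustrative sketch (editor's example):
+#   get_darwin_arches(10, 13, 'x86_64')
+#   -> ['x86_64', 'intel', 'fat64', 'fat32', 'universal']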
+
+def get_supported(versions=None, noarch=False, platform=None,
+ impl=None, abi=None):
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param versions: a list of string versions, of the form ["33", "32"],
+ or None. The first version will be assumed to support our ABI.
+ :param platform: specify the exact platform you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abi: specify the exact abi you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported = []
+
+    # Versions must be ordered from most- to least-preferred
+ if versions is None:
+ versions = []
+ version_info = get_impl_version_info()
+ major = version_info[:-1]
+ # Support all previous minor Python versions.
+ for minor in range(version_info[-1], -1, -1):
+ versions.append(''.join(map(str, major + (minor,))))
+
+ impl = impl or get_abbr_impl()
+
+ abis = []
+
+ abi = abi or get_abi_tag()
+ if abi:
+ abis[0:0] = [abi]
+
+ abi3s = set()
+ import imp
+ for suffix in imp.get_suffixes():
+ if suffix[0].startswith('.abi'):
+ abi3s.add(suffix[0].split('.', 2)[1])
+
+ abis.extend(sorted(list(abi3s)))
+
+ abis.append('none')
+
+ if not noarch:
+ arch = platform or get_platform()
+ if arch.startswith('macosx'):
+ # support macosx-10.6-intel on macosx-10.9-x86_64
+ match = _osx_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ tpl = '{}_{}_%i_%s'.format(name, major)
+ arches = []
+ for m in reversed(range(int(minor) + 1)):
+ for a in get_darwin_arches(int(major), m, actual_arch):
+ arches.append(tpl % (m, a))
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ elif platform is None and is_manylinux1_compatible():
+ arches = [arch.replace('linux', 'manylinux1'), arch]
+ else:
+ arches = [arch]
+
+ # Current version, current API (built specifically for our Python):
+ for abi in abis:
+ for arch in arches:
+ supported.append(('%s%s' % (impl, versions[0]), abi, arch))
+
+ # abi3 modules compatible with older version of Python
+ for version in versions[1:]:
+ # abi3 was introduced in Python 3.2
+ if version in {'31', '30'}:
+ break
+ for abi in abi3s: # empty set if not Python 3
+ for arch in arches:
+ supported.append(("%s%s" % (impl, version), abi, arch))
+
+ # Has binaries, does not use the Python API:
+ for arch in arches:
+ supported.append(('py%s' % (versions[0][0]), 'none', arch))
+
+ # No abi / arch, but requires our implementation:
+ supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
+ # Tagged specifically as being cross-version compatible
+ # (with just the major version specified)
+ supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
+
+ # No abi / arch, generic Python
+ for i, version in enumerate(versions):
+ supported.append(('py%s' % (version,), 'none', 'any'))
+ if i == 0:
+ supported.append(('py%s' % (version[0]), 'none', 'any'))
+
+ return supported
+
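+# Illustrative sketch (platform-dependent): on a manylinux1-compatible 64-bit
+# CPython 3.6, the most-preferred tag is
+#   ('cp36', 'cp36m', 'manylinux1_x86_64')
+# followed by abi3, 'none'-ABI, and pure-Python ('py3*') tags in order.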
+
+implementation_tag = get_impl_tag()
diff --git a/setuptools/py27compat.py b/setuptools/py27compat.py
new file mode 100644
index 0000000..2985011
--- /dev/null
+++ b/setuptools/py27compat.py
@@ -0,0 +1,28 @@
+"""
+Compatibility Support for Python 2.7 and earlier
+"""
+
+import platform
+
+from setuptools.extern import six
+
+
+def get_all_headers(message, key):
+ """
+ Given an HTTPMessage, return all headers matching a given key.
+ """
+ return message.get_all(key)
+
+
+if six.PY2:
+ def get_all_headers(message, key):
+ return message.getheaders(key)
+
+
+linux_py2_ascii = (
+ platform.system() == 'Linux' and
+ six.PY2
+)
+
+rmtree_safe = str if linux_py2_ascii else lambda x: x
+"""Workaround for http://bugs.python.org/issue24672"""
diff --git a/setuptools/py31compat.py b/setuptools/py31compat.py
new file mode 100644
index 0000000..4ea9532
--- /dev/null
+++ b/setuptools/py31compat.py
@@ -0,0 +1,41 @@
+__all__ = ['get_config_vars', 'get_path']
+
+try:
+ # Python 2.7 or >=3.2
+ from sysconfig import get_config_vars, get_path
+except ImportError:
+ from distutils.sysconfig import get_config_vars, get_python_lib
+
+ def get_path(name):
+ if name not in ('platlib', 'purelib'):
+ raise ValueError("Name must be purelib or platlib")
+ return get_python_lib(name == 'platlib')
+
+
+try:
+ # Python >=3.2
+ from tempfile import TemporaryDirectory
+except ImportError:
+ import shutil
+ import tempfile
+
+ class TemporaryDirectory(object):
+ """
+ Very simple temporary directory context manager.
+ Will try to delete afterward, but will also ignore OS and similar
+ errors on deletion.
+ """
+
+ def __init__(self):
+ self.name = None # Handle mkdtemp raising an exception
+ self.name = tempfile.mkdtemp()
+
+ def __enter__(self):
+ return self.name
+
+ def __exit__(self, exctype, excvalue, exctrace):
+ try:
+ shutil.rmtree(self.name, True)
+            except OSError:  # removal errors are not the only ones possible
+ pass
+ self.name = None
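+
+
+# Illustrative usage sketch (editor's example; never called by setuptools).
+# Works with either the stdlib class or the fallback above:
+def _tempdir_demo():
+    with TemporaryDirectory() as tmp:
+        return tmp  # a path string; cleanup is best-effort on exit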
diff --git a/setuptools/py33compat.py b/setuptools/py33compat.py
new file mode 100644
index 0000000..2a73ebb
--- /dev/null
+++ b/setuptools/py33compat.py
@@ -0,0 +1,54 @@
+import dis
+import array
+import collections
+
+try:
+ import html
+except ImportError:
+ html = None
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import html_parser
+
+
+OpArg = collections.namedtuple('OpArg', 'opcode arg')
+
+
+class Bytecode_compat(object):
+ def __init__(self, code):
+ self.code = code
+
+ def __iter__(self):
+ """Yield '(op,arg)' pair for each operation in code object 'code'"""
+
+ bytes = array.array('b', self.code.co_code)
+ eof = len(self.code.co_code)
+
+ ptr = 0
+ extended_arg = 0
+
+ while ptr < eof:
+
+ op = bytes[ptr]
+
+ if op >= dis.HAVE_ARGUMENT:
+
+ arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
+ ptr += 3
+
+ if op == dis.EXTENDED_ARG:
+ long_type = six.integer_types[-1]
+ extended_arg = arg * long_type(65536)
+ continue
+
+ else:
+ arg = None
+ ptr += 1
+
+ yield OpArg(op, arg)
+
+
+Bytecode = getattr(dis, 'Bytecode', Bytecode_compat)
+
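+# Illustrative sketch (editor's example): both implementations iterate the
+# operations of a code object, though the item types differ (stdlib
+# Instruction vs. the OpArg fallback above):
+#   ops = list(Bytecode(compile('x = 1', '<demo>', 'exec')))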
+
+unescape = getattr(html, 'unescape', html_parser.HTMLParser().unescape)
diff --git a/setuptools/py36compat.py b/setuptools/py36compat.py
new file mode 100644
index 0000000..f527969
--- /dev/null
+++ b/setuptools/py36compat.py
@@ -0,0 +1,82 @@
+import sys
+from distutils.errors import DistutilsOptionError
+from distutils.util import strtobool
+from distutils.debug import DEBUG
+
+
+class Distribution_parse_config_files:
+ """
+ Mix-in providing forward-compatibility for functionality to be
+ included by default on Python 3.7.
+
+ Do not edit the code in this class except to update functionality
+ as implemented in distutils.
+ """
+ def parse_config_files(self, filenames=None):
+ from configparser import ConfigParser
+
+ # Ignore install directory options if we have a venv
+ if sys.prefix != sys.base_prefix:
+ ignore_options = [
+ 'install-base', 'install-platbase', 'install-lib',
+ 'install-platlib', 'install-purelib', 'install-headers',
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+ 'home', 'user', 'root']
+ else:
+ ignore_options = []
+
+ ignore_options = frozenset(ignore_options)
+
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ if DEBUG:
+ self.announce("Distribution.parse_config_files():")
+
+ parser = ConfigParser(interpolation=None)
+ for filename in filenames:
+ if DEBUG:
+ self.announce(" reading %s" % filename)
+ parser.read(filename)
+ for section in parser.sections():
+ options = parser.options(section)
+ opt_dict = self.get_option_dict(section)
+
+ for opt in options:
+ if opt != '__name__' and opt not in ignore_options:
+                        val = parser.get(section, opt)
+ opt = opt.replace('-', '_')
+ opt_dict[opt] = (filename, val)
+
+ # Make the ConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+
+ if 'global' in self.command_options:
+ for (opt, (src, val)) in self.command_options['global'].items():
+ alias = self.negative_opt.get(opt)
+ try:
+ if alias:
+ setattr(self, alias, not strtobool(val))
+ elif opt in ('verbose', 'dry_run'): # ugh!
+ setattr(self, opt, strtobool(val))
+ else:
+ setattr(self, opt, val)
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+
+if sys.version_info < (3,):
+ # Python 2 behavior is sufficient
+ class Distribution_parse_config_files:
+ pass
+
+
+if False:
+ # When updated behavior is available upstream,
+ # disable override here.
+ class Distribution_parse_config_files:
+ pass
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
new file mode 100755
index 0000000..685f3f7
--- /dev/null
+++ b/setuptools/sandbox.py
@@ -0,0 +1,491 @@
+import os
+import sys
+import tempfile
+import operator
+import functools
+import itertools
+import re
+import contextlib
+import pickle
+import textwrap
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import builtins, map
+
+import pkg_resources.py31compat
+
+if sys.platform.startswith('java'):
+ import org.python.modules.posix.PosixModule as _os
+else:
+ _os = sys.modules[os.name]
+try:
+ _file = file
+except NameError:
+ _file = None
+_open = open
+from distutils.errors import DistutilsError
+from pkg_resources import working_set
+
+
+__all__ = [
+ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+
+
+def _execfile(filename, globals, locals=None):
+ """
+ Python 3 implementation of execfile.
+ """
+ mode = 'rb'
+ with open(filename, mode) as stream:
+ script = stream.read()
+ if locals is None:
+ locals = globals
+ code = compile(script, filename, 'exec')
+ exec(code, globals, locals)
+
+
+@contextlib.contextmanager
+def save_argv(repl=None):
+ saved = sys.argv[:]
+ if repl is not None:
+ sys.argv[:] = repl
+ try:
+ yield saved
+ finally:
+ sys.argv[:] = saved
+
+
+@contextlib.contextmanager
+def save_path():
+ saved = sys.path[:]
+ try:
+ yield saved
+ finally:
+ sys.path[:] = saved
+
+
+@contextlib.contextmanager
+def override_temp(replacement):
+ """
+ Monkey-patch tempfile.tempdir with replacement, ensuring it exists
+ """
+ pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
+
+ saved = tempfile.tempdir
+
+ tempfile.tempdir = replacement
+
+ try:
+ yield
+ finally:
+ tempfile.tempdir = saved
+
+
+@contextlib.contextmanager
+def pushd(target):
+ saved = os.getcwd()
+ os.chdir(target)
+ try:
+ yield saved
+ finally:
+ os.chdir(saved)
+
+
+class UnpickleableException(Exception):
+ """
+ An exception representing another Exception that could not be pickled.
+ """
+
+ @staticmethod
+ def dump(type, exc):
+ """
+ Always return a dumped (pickled) type and exc. If exc can't be pickled,
+ wrap it in UnpickleableException first.
+ """
+ try:
+ return pickle.dumps(type), pickle.dumps(exc)
+ except Exception:
+ # get UnpickleableException inside the sandbox
+ from setuptools.sandbox import UnpickleableException as cls
+ return cls.dump(cls, cls(repr(exc)))
+
+
+class ExceptionSaver:
+ """
+ A Context Manager that will save an exception, serialized, and restore it
+ later.
+ """
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, exc, tb):
+ if not exc:
+ return
+
+ # dump the exception
+ self._saved = UnpickleableException.dump(type, exc)
+ self._tb = tb
+
+ # suppress the exception
+ return True
+
+ def resume(self):
+ "restore and re-raise any exception"
+
+ if '_saved' not in vars(self):
+ return
+
+ type, exc = map(pickle.loads, self._saved)
+ six.reraise(type, exc, self._tb)
+
+
+@contextlib.contextmanager
+def save_modules():
+ """
+ Context in which imported modules are saved.
+
+ Translates exceptions internal to the context into the equivalent exception
+ outside the context.
+ """
+ saved = sys.modules.copy()
+ with ExceptionSaver() as saved_exc:
+ yield saved
+
+ sys.modules.update(saved)
+ # remove any modules imported since
+ del_modules = (
+ mod_name for mod_name in sys.modules
+ if mod_name not in saved
+ # exclude any encodings modules. See #285
+ and not mod_name.startswith('encodings.')
+ )
+ _clear_modules(del_modules)
+
+ saved_exc.resume()
+
+
+def _clear_modules(module_names):
+ for mod_name in list(module_names):
+ del sys.modules[mod_name]
+
+
+@contextlib.contextmanager
+def save_pkg_resources_state():
+ saved = pkg_resources.__getstate__()
+ try:
+ yield saved
+ finally:
+ pkg_resources.__setstate__(saved)
+
+
+@contextlib.contextmanager
+def setup_context(setup_dir):
+ temp_dir = os.path.join(setup_dir, 'temp')
+ with save_pkg_resources_state():
+ with save_modules():
+ hide_setuptools()
+ with save_path():
+ with save_argv():
+ with override_temp(temp_dir):
+ with pushd(setup_dir):
+ # ensure setuptools commands are available
+ __import__('setuptools')
+ yield
+
+
+def _needs_hiding(mod_name):
+ """
+ >>> _needs_hiding('setuptools')
+ True
+ >>> _needs_hiding('pkg_resources')
+ True
+ >>> _needs_hiding('setuptools_plugin')
+ False
+ >>> _needs_hiding('setuptools.__init__')
+ True
+ >>> _needs_hiding('distutils')
+ True
+ >>> _needs_hiding('os')
+ False
+ >>> _needs_hiding('Cython')
+ True
+ """
+ pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
+ return bool(pattern.match(mod_name))
+
+
+def hide_setuptools():
+ """
+ Remove references to setuptools' modules from sys.modules to allow the
+ invocation to import the most appropriate setuptools. This technique is
+ necessary to avoid issues such as #315 where setuptools upgrading itself
+ would fail to find a function declared in the metadata.
+ """
+ modules = filter(_needs_hiding, sys.modules)
+ _clear_modules(modules)
+
+
+def run_setup(setup_script, args):
+ """Run a distutils setup script, sandboxed in its directory"""
+ setup_dir = os.path.abspath(os.path.dirname(setup_script))
+ with setup_context(setup_dir):
+ try:
+ sys.argv[:] = [setup_script] + list(args)
+ sys.path.insert(0, setup_dir)
+ # reset to include setup dir, w/clean callback list
+ working_set.__init__()
+ working_set.callbacks.append(lambda dist: dist.activate())
+
+ # __file__ should be a byte string on Python 2 (#712)
+ dunder_file = (
+ setup_script
+ if isinstance(setup_script, str) else
+ setup_script.encode(sys.getfilesystemencoding())
+ )
+
+ with DirectorySandbox(setup_dir):
+ ns = dict(__file__=dunder_file, __name__='__main__')
+ _execfile(setup_script, ns)
+ except SystemExit as v:
+ if v.args and v.args[0]:
+ raise
+ # Normal exit, just return
+
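+# Illustrative sketch (hypothetical path; editor's example):
+#   run_setup('/tmp/pkg/setup.py', ['bdist_egg'])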
+
+class AbstractSandbox:
+ """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
+
+ _active = False
+
+ def __init__(self):
+ self._attrs = [
+ name for name in dir(_os)
+ if not name.startswith('_') and hasattr(self, name)
+ ]
+
+ def _copy(self, source):
+ for name in self._attrs:
+ setattr(os, name, getattr(source, name))
+
+ def __enter__(self):
+ self._copy(self)
+ if _file:
+ builtins.file = self._file
+ builtins.open = self._open
+ self._active = True
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._active = False
+ if _file:
+ builtins.file = _file
+ builtins.open = _open
+ self._copy(_os)
+
+ def run(self, func):
+ """Run 'func' under os sandboxing"""
+ with self:
+ return func()
+
+ def _mk_dual_path_wrapper(name):
+ original = getattr(_os, name)
+
+ def wrap(self, src, dst, *args, **kw):
+ if self._active:
+ src, dst = self._remap_pair(name, src, dst, *args, **kw)
+ return original(src, dst, *args, **kw)
+
+ return wrap
+
+ for name in ["rename", "link", "symlink"]:
+ if hasattr(_os, name):
+ locals()[name] = _mk_dual_path_wrapper(name)
+
+ def _mk_single_path_wrapper(name, original=None):
+ original = original or getattr(_os, name)
+
+ def wrap(self, path, *args, **kw):
+ if self._active:
+ path = self._remap_input(name, path, *args, **kw)
+ return original(path, *args, **kw)
+
+ return wrap
+
+ if _file:
+ _file = _mk_single_path_wrapper('file', _file)
+ _open = _mk_single_path_wrapper('open', _open)
+ for name in [
+ "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
+ "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
+ "startfile", "mkfifo", "mknod", "pathconf", "access"
+ ]:
+ if hasattr(_os, name):
+ locals()[name] = _mk_single_path_wrapper(name)
+
+ def _mk_single_with_return(name):
+ original = getattr(_os, name)
+
+ def wrap(self, path, *args, **kw):
+ if self._active:
+ path = self._remap_input(name, path, *args, **kw)
+ return self._remap_output(name, original(path, *args, **kw))
+ return original(path, *args, **kw)
+
+ return wrap
+
+ for name in ['readlink', 'tempnam']:
+ if hasattr(_os, name):
+ locals()[name] = _mk_single_with_return(name)
+
+ def _mk_query(name):
+ original = getattr(_os, name)
+
+ def wrap(self, *args, **kw):
+ retval = original(*args, **kw)
+ if self._active:
+ return self._remap_output(name, retval)
+ return retval
+
+ return wrap
+
+ for name in ['getcwd', 'tmpnam']:
+ if hasattr(_os, name):
+ locals()[name] = _mk_query(name)
+
+ def _validate_path(self, path):
+ """Called to remap or validate any path, whether input or output"""
+ return path
+
+ def _remap_input(self, operation, path, *args, **kw):
+ """Called for path inputs"""
+ return self._validate_path(path)
+
+ def _remap_output(self, operation, path):
+ """Called for path outputs"""
+ return self._validate_path(path)
+
+ def _remap_pair(self, operation, src, dst, *args, **kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ return (
+ self._remap_input(operation + '-from', src, *args, **kw),
+ self._remap_input(operation + '-to', dst, *args, **kw)
+ )
+
+
+if hasattr(os, 'devnull'):
+ _EXCEPTIONS = [os.devnull,]
+else:
+ _EXCEPTIONS = []
+
+
+class DirectorySandbox(AbstractSandbox):
+ """Restrict operations to a single subdirectory - pseudo-chroot"""
+
+ write_ops = dict.fromkeys([
+ "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
+ "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
+ ])
+
+ _exception_patterns = [
+ # Allow lib2to3 to attempt to save a pickled grammar object (#121)
+ r'.*lib2to3.*\.pickle$',
+ ]
+ "exempt writing to paths that match the pattern"
+
+ def __init__(self, sandbox, exceptions=_EXCEPTIONS):
+ self._sandbox = os.path.normcase(os.path.realpath(sandbox))
+ self._prefix = os.path.join(self._sandbox, '')
+ self._exceptions = [
+ os.path.normcase(os.path.realpath(path))
+ for path in exceptions
+ ]
+ AbstractSandbox.__init__(self)
+
+ def _violation(self, operation, *args, **kw):
+ from setuptools.sandbox import SandboxViolation
+ raise SandboxViolation(operation, args, kw)
+
+ if _file:
+
+ def _file(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("file", path, mode, *args, **kw)
+ return _file(path, mode, *args, **kw)
+
+ def _open(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("open", path, mode, *args, **kw)
+ return _open(path, mode, *args, **kw)
+
+ def tmpnam(self):
+ self._violation("tmpnam")
+
+ def _ok(self, path):
+ active = self._active
+ try:
+ self._active = False
+ realpath = os.path.normcase(os.path.realpath(path))
+ return (
+ self._exempted(realpath)
+ or realpath == self._sandbox
+ or realpath.startswith(self._prefix)
+ )
+ finally:
+ self._active = active
+
+ def _exempted(self, filepath):
+ start_matches = (
+ filepath.startswith(exception)
+ for exception in self._exceptions
+ )
+ pattern_matches = (
+ re.match(pattern, filepath)
+ for pattern in self._exception_patterns
+ )
+ candidates = itertools.chain(start_matches, pattern_matches)
+ return any(candidates)
+
+ def _remap_input(self, operation, path, *args, **kw):
+ """Called for path inputs"""
+ if operation in self.write_ops and not self._ok(path):
+ self._violation(operation, os.path.realpath(path), *args, **kw)
+ return path
+
+ def _remap_pair(self, operation, src, dst, *args, **kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ if not self._ok(src) or not self._ok(dst):
+ self._violation(operation, src, dst, *args, **kw)
+ return (src, dst)
+
+ def open(self, file, flags, mode=0o777, *args, **kw):
+ """Called for low-level os.open()"""
+ if flags & WRITE_FLAGS and not self._ok(file):
+ self._violation("os.open", file, flags, mode, *args, **kw)
+ return _os.open(file, flags, mode, *args, **kw)
+
+
+WRITE_FLAGS = functools.reduce(
+ operator.or_, [getattr(_os, a, 0) for a in
+ "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
+)
+
+
+class SandboxViolation(DistutilsError):
+ """A setup script attempted to modify the filesystem outside the sandbox"""
+
+ tmpl = textwrap.dedent("""
+ SandboxViolation: {cmd}{args!r} {kwargs}
+
+ The package setup script has attempted to modify files on your system
+ that are not within the EasyInstall build area, and has been aborted.
+
+ This package cannot be safely installed by EasyInstall, and may not
+ support alternate installation locations even if you run its setup
+ script by hand. Please inform the package's author and the EasyInstall
+ maintainers to find out if a fix or workaround is available.
+ """).lstrip()
+
+ def __str__(self):
+ cmd, args, kwargs = self.args
+ return self.tmpl.format(**locals())
diff --git a/setuptools/script (dev).tmpl b/setuptools/script (dev).tmpl
new file mode 100644
index 0000000..d58b1bb
--- /dev/null
+++ b/setuptools/script (dev).tmpl
@@ -0,0 +1,5 @@
+# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').require(%(spec)r)
+__file__ = %(dev_path)r
+exec(compile(open(__file__).read(), __file__, 'exec'))
diff --git a/setuptools/script.tmpl b/setuptools/script.tmpl
new file mode 100644
index 0000000..ff5efbc
--- /dev/null
+++ b/setuptools/script.tmpl
@@ -0,0 +1,3 @@
+# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
diff --git a/setuptools/site-patch.py b/setuptools/site-patch.py
new file mode 100644
index 0000000..0d2d2ff
--- /dev/null
+++ b/setuptools/site-patch.py
@@ -0,0 +1,74 @@
+def __boot():
+ import sys
+ import os
+ PYTHONPATH = os.environ.get('PYTHONPATH')
+ if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
+ PYTHONPATH = []
+ else:
+ PYTHONPATH = PYTHONPATH.split(os.pathsep)
+
+ pic = getattr(sys, 'path_importer_cache', {})
+ stdpath = sys.path[len(PYTHONPATH):]
+ mydir = os.path.dirname(__file__)
+
+ for item in stdpath:
+ if item == mydir or not item:
+            continue  # skip the empty entry (current dir on Windows) and my own directory
+ importer = pic.get(item)
+ if importer is not None:
+ loader = importer.find_module('site')
+ if loader is not None:
+ # This should actually reload the current module
+ loader.load_module('site')
+ break
+ else:
+ try:
+ import imp # Avoid import loop in Python >= 3.3
+ stream, path, descr = imp.find_module('site', [item])
+ except ImportError:
+ continue
+ if stream is None:
+ continue
+ try:
+ # This should actually reload the current module
+ imp.load_module('site', stream, path, descr)
+ finally:
+ stream.close()
+ break
+ else:
+ raise ImportError("Couldn't find the real 'site' module")
+
+    known_paths = dict([(makepath(item)[1], 1) for item in sys.path])  # 2.2 compat
+
+ oldpos = getattr(sys, '__egginsert', 0) # save old insertion position
+ sys.__egginsert = 0 # and reset the current one
+
+ for item in PYTHONPATH:
+ addsitedir(item)
+
+ sys.__egginsert += oldpos # restore effective old position
+
+ d, nd = makepath(stdpath[0])
+ insert_at = None
+ new_path = []
+
+ for item in sys.path:
+ p, np = makepath(item)
+
+ if np == nd and insert_at is None:
+ # We've hit the first 'system' path entry, so added entries go here
+ insert_at = len(new_path)
+
+ if np in known_paths or insert_at is None:
+ new_path.append(item)
+ else:
+ # new path after the insert point, back-insert it
+ new_path.insert(insert_at, item)
+ insert_at += 1
+
+ sys.path[:] = new_path
+
+
+if __name__ == 'site':
+ __boot()
+ del __boot
diff --git a/setuptools/ssl_support.py b/setuptools/ssl_support.py
new file mode 100644
index 0000000..6362f1f
--- /dev/null
+++ b/setuptools/ssl_support.py
@@ -0,0 +1,260 @@
+import os
+import socket
+import atexit
+import re
+import functools
+
+from setuptools.extern.six.moves import urllib, http_client, map, filter
+
+from pkg_resources import ResolutionError, ExtractionError
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+__all__ = [
+ 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
+ 'opener_for'
+]
+
+cert_paths = """
+/etc/pki/tls/certs/ca-bundle.crt
+/etc/ssl/certs/ca-certificates.crt
+/usr/share/ssl/certs/ca-bundle.crt
+/usr/local/share/certs/ca-root.crt
+/etc/ssl/cert.pem
+/System/Library/OpenSSL/certs/cert.pem
+/usr/local/share/certs/ca-root-nss.crt
+/etc/ssl/ca-bundle.pem
+""".strip().split()
+
+try:
+ HTTPSHandler = urllib.request.HTTPSHandler
+ HTTPSConnection = http_client.HTTPSConnection
+except AttributeError:
+ HTTPSHandler = HTTPSConnection = object
+
+is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
+
+
+try:
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ from backports.ssl_match_hostname import CertificateError
+ from backports.ssl_match_hostname import match_hostname
+ except ImportError:
+ CertificateError = None
+ match_hostname = None
+
+if not CertificateError:
+
+ class CertificateError(ValueError):
+ pass
+
+
+if not match_hostname:
+
+ def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+ def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
+
+
+class VerifyingHTTPSHandler(HTTPSHandler):
+ """Simple verifying handler: no auth, subclasses, timeouts, etc."""
+
+ def __init__(self, ca_bundle):
+ self.ca_bundle = ca_bundle
+ HTTPSHandler.__init__(self)
+
+ def https_open(self, req):
+ return self.do_open(
+ lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
+ )
+
+
+class VerifyingHTTPSConn(HTTPSConnection):
+ """Simple verifying connection: no auth, subclasses, timeouts, etc."""
+
+ def __init__(self, host, ca_bundle, **kw):
+ HTTPSConnection.__init__(self, host, **kw)
+ self.ca_bundle = ca_bundle
+
+ def connect(self):
+ sock = socket.create_connection(
+ (self.host, self.port), getattr(self, 'source_address', None)
+ )
+
+ # Handle the socket if a (proxy) tunnel is present
+ if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
+ self.sock = sock
+ self._tunnel()
+ # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
+ # change self.host to mean the proxy server host when tunneling is
+ # being used. Adapt, since we are interested in the destination
+ # host for the match_hostname() comparison.
+ actual_host = self._tunnel_host
+ else:
+ actual_host = self.host
+
+ if hasattr(ssl, 'create_default_context'):
+ ctx = ssl.create_default_context(cafile=self.ca_bundle)
+ self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
+ else:
+            # Fallback for Python < 2.7.9 / < 3.4, which lack
+            # ssl.create_default_context()
+ self.sock = ssl.wrap_socket(
+ sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
+ )
+ try:
+ match_hostname(self.sock.getpeercert(), actual_host)
+ except CertificateError:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+
+def opener_for(ca_bundle=None):
+ """Get a urlopen() replacement that uses ca_bundle for verification"""
+ return urllib.request.build_opener(
+ VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
+ ).open
+
+
+# from jaraco.functools
+def once(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not hasattr(func, 'always_returns'):
+ func.always_returns = func(*args, **kwargs)
+ return func.always_returns
+ return wrapper
+
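+# Illustrative sketch (editor's example): later calls return the result
+# cached from the first call.
+#   @once
+#   def probe():  # hypothetical expensive check
+#       return expensive_lookup()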
+
+@once
+def get_win_certfile():
+ try:
+ import wincertstore
+ except ImportError:
+ return None
+
+ class CertFile(wincertstore.CertFile):
+ def __init__(self):
+ super(CertFile, self).__init__()
+ atexit.register(self.close)
+
+ def close(self):
+ try:
+ super(CertFile, self).close()
+ except OSError:
+ pass
+
+ _wincerts = CertFile()
+ _wincerts.addstore('CA')
+ _wincerts.addstore('ROOT')
+ return _wincerts.name
+
+
+def find_ca_bundle():
+ """Return an existing CA bundle path, or None"""
+ extant_cert_paths = filter(os.path.isfile, cert_paths)
+ return (
+ get_win_certfile()
+ or next(extant_cert_paths, None)
+ or _certifi_where()
+ )
+
+
+def _certifi_where():
+ try:
+ return __import__('certifi').where()
+ except (ImportError, ResolutionError, ExtractionError):
+ pass
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
new file mode 100644
index 0000000..54dd7d2
--- /dev/null
+++ b/setuptools/tests/__init__.py
@@ -0,0 +1,6 @@
+import locale
+
+import pytest
+
+is_ascii = locale.getpreferredencoding() == 'ANSI_X3.4-1968'
+fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
new file mode 100644
index 0000000..535ae10
--- /dev/null
+++ b/setuptools/tests/contexts.py
@@ -0,0 +1,98 @@
+import tempfile
+import os
+import shutil
+import sys
+import contextlib
+import site
+
+from setuptools.extern import six
+import pkg_resources
+
+
+@contextlib.contextmanager
+def tempdir(cd=lambda dir: None, **kwargs):
+ temp_dir = tempfile.mkdtemp(**kwargs)
+ orig_dir = os.getcwd()
+ try:
+ cd(temp_dir)
+ yield temp_dir
+ finally:
+ cd(orig_dir)
+ shutil.rmtree(temp_dir)
+
+
+@contextlib.contextmanager
+def environment(**replacements):
+ """
+    In a context, patch the environment with replacements. Pass a None
+    value to clear a variable.
+ """
+ saved = dict(
+ (key, os.environ[key])
+ for key in replacements
+ if key in os.environ
+ )
+
+ # remove values that are null
+ remove = (key for (key, value) in replacements.items() if value is None)
+ for key in list(remove):
+ os.environ.pop(key, None)
+ replacements.pop(key)
+
+ os.environ.update(replacements)
+
+ try:
+ yield saved
+ finally:
+ for key in replacements:
+ os.environ.pop(key, None)
+ os.environ.update(saved)
+
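+# Illustrative sketch (hypothetical values): set HOME and clear LANG for the
+# duration of the block; both are restored afterwards.
+#   with environment(HOME='/tmp/home', LANG=None):
+#       ...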
+
+@contextlib.contextmanager
+def quiet():
+ """
+ Redirect stdout/stderr to StringIO objects to prevent console output from
+ distutils commands.
+ """
+
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ new_stdout = sys.stdout = six.StringIO()
+ new_stderr = sys.stderr = six.StringIO()
+ try:
+ yield new_stdout, new_stderr
+ finally:
+ new_stdout.seek(0)
+ new_stderr.seek(0)
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+
+
+@contextlib.contextmanager
+def save_user_site_setting():
+ saved = site.ENABLE_USER_SITE
+ try:
+ yield saved
+ finally:
+ site.ENABLE_USER_SITE = saved
+
+
+@contextlib.contextmanager
+def save_pkg_resources_state():
+ pr_state = pkg_resources.__getstate__()
+ # also save sys.path
+ sys_path = sys.path[:]
+ try:
+ yield pr_state, sys_path
+ finally:
+ sys.path[:] = sys_path
+ pkg_resources.__setstate__(pr_state)
+
+
+@contextlib.contextmanager
+def suppress_exceptions(*excs):
+ try:
+ yield
+ except excs:
+ pass
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
new file mode 100644
index 0000000..c67898c
--- /dev/null
+++ b/setuptools/tests/environment.py
@@ -0,0 +1,60 @@
+import os
+import sys
+import unicodedata
+
+from subprocess import Popen as _Popen, PIPE as _PIPE
+
+
+def _which_dirs(cmd):
+ result = set()
+ for path in os.environ.get('PATH', '').split(os.pathsep):
+ filename = os.path.join(path, cmd)
+ if os.access(filename, os.X_OK):
+ result.add(path)
+ return result
+
+
+def run_setup_py(cmd, pypath=None, path=None,
+ data_stream=0, env=None):
+ """
+    Execute a setup.py command in a subprocess for tests, kept separate from
+    the code under test to prevent accidental interference.
+ """
+ if env is None:
+ env = dict()
+ for envname in os.environ:
+ env[envname] = os.environ[envname]
+
+ # override the python path if needed
+ if pypath is not None:
+ env["PYTHONPATH"] = pypath
+
+    # override the execution path if needed
+ if path is not None:
+ env["PATH"] = path
+ if not env.get("PATH", ""):
+ env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
+ env["PATH"] = os.pathsep.join(env["PATH"])
+
+ cmd = [sys.executable, "setup.py"] + list(cmd)
+
+ # http://bugs.python.org/issue8557
+ shell = sys.platform == 'win32'
+
+ try:
+ proc = _Popen(
+ cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env,
+ )
+
+ data = proc.communicate()[data_stream]
+ except OSError:
+ return 1, ''
+
+ # decode the console string if needed
+ if hasattr(data, "decode"):
+ # use the default encoding
+ data = data.decode()
+ data = unicodedata.normalize('NFC', data)
+
+ # communicate calls wait()
+ return proc.returncode, data
diff --git a/setuptools/tests/files.py b/setuptools/tests/files.py
new file mode 100644
index 0000000..f5f0e6b
--- /dev/null
+++ b/setuptools/tests/files.py
@@ -0,0 +1,39 @@
+import os
+
+
+from setuptools.extern.six import binary_type
+import pkg_resources.py31compat
+
+
+def build_files(file_defs, prefix=""):
+ """
+ Build a set of files/directories, as described by the file_defs dictionary.
+
+ Each key/value pair in the dictionary is interpreted as a filename/contents
+ pair. If the contents value is a dictionary, a directory is created, and the
+ dictionary interpreted as the files within it, recursively.
+
+ For example:
+
+ {"README.txt": "A README file",
+ "foo": {
+ "__init__.py": "",
+ "bar": {
+ "__init__.py": "",
+ },
+ "baz.py": "# Some code",
+ }
+ }
+ """
+ for name, contents in file_defs.items():
+ full_name = os.path.join(prefix, name)
+ if isinstance(contents, dict):
+ pkg_resources.py31compat.makedirs(full_name, exist_ok=True)
+ build_files(contents, prefix=full_name)
+ else:
+ if isinstance(contents, binary_type):
+ with open(full_name, 'wb') as f:
+ f.write(contents)
+ else:
+ with open(full_name, 'w') as f:
+ f.write(contents)
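+
+
+# Illustrative sketch (editor's example; never called by the tests): build a
+# tiny package layout in the current directory.
+def _build_files_demo():
+    build_files({
+        'README.txt': 'A README file',
+        'pkg': {'__init__.py': ''},
+    })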
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
new file mode 100644
index 0000000..5204c8d
--- /dev/null
+++ b/setuptools/tests/fixtures.py
@@ -0,0 +1,23 @@
+import pytest
+
+from . import contexts
+
+
+@pytest.yield_fixture
+def user_override(monkeypatch):
+ """
+ Override site.USER_BASE and site.USER_SITE with temporary directories in
+ a context.
+ """
+ with contexts.tempdir() as user_base:
+ monkeypatch.setattr('site.USER_BASE', user_base)
+ with contexts.tempdir() as user_site:
+ monkeypatch.setattr('site.USER_SITE', user_site)
+ with contexts.save_user_site_setting():
+ yield
+
+
+@pytest.yield_fixture
+def tmpdir_cwd(tmpdir):
+ with tmpdir.as_cwd() as orig:
+ yield orig
diff --git a/setuptools/tests/indexes/test_links_priority/external.html b/setuptools/tests/indexes/test_links_priority/external.html
new file mode 100644
index 0000000..92e4702
--- /dev/null
+++ b/setuptools/tests/indexes/test_links_priority/external.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
+</body></html>
diff --git a/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
new file mode 100644
index 0000000..fefb028
--- /dev/null
+++ b/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
+<a href="../../external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/setuptools/tests/mod_with_constant.py b/setuptools/tests/mod_with_constant.py
new file mode 100644
index 0000000..ef755dd
--- /dev/null
+++ b/setuptools/tests/mod_with_constant.py
@@ -0,0 +1 @@
+value = 'three, sir!'
diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py
new file mode 100644
index 0000000..ef5ecda
--- /dev/null
+++ b/setuptools/tests/namespaces.py
@@ -0,0 +1,42 @@
+from __future__ import absolute_import, unicode_literals
+
+import textwrap
+
+
+def build_namespace_package(tmpdir, name):
+ src_dir = tmpdir / name
+ src_dir.mkdir()
+ setup_py = src_dir / 'setup.py'
+ namespace, sep, rest = name.partition('.')
+ script = textwrap.dedent("""
+ import setuptools
+ setuptools.setup(
+ name={name!r},
+ version="1.0",
+ namespace_packages=[{namespace!r}],
+ packages=[{namespace!r}],
+ )
+ """).format(**locals())
+ setup_py.write_text(script, encoding='utf-8')
+ ns_pkg_dir = src_dir / namespace
+ ns_pkg_dir.mkdir()
+ pkg_init = ns_pkg_dir / '__init__.py'
+ tmpl = '__import__("pkg_resources").declare_namespace({namespace!r})'
+ decl = tmpl.format(**locals())
+ pkg_init.write_text(decl, encoding='utf-8')
+ pkg_mod = ns_pkg_dir / (rest + '.py')
+ some_functionality = 'name = {rest!r}'.format(**locals())
+ pkg_mod.write_text(some_functionality, encoding='utf-8')
+ return src_dir
+
+
+def make_site_dir(target):
+ """
+ Add a sitecustomize.py module in target to cause
+ target to be added to site dirs such that .pth files
+ are processed there.
+ """
+ sc = target / 'sitecustomize.py'
+ target_str = str(target)
+ tmpl = '__import__("site").addsitedir({target_str!r})'
+ sc.write_text(tmpl.format(**locals()), encoding='utf-8')
diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py
new file mode 100644
index 0000000..22dee0d
--- /dev/null
+++ b/setuptools/tests/script-with-bom.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+result = 'passed'
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
new file mode 100644
index 0000000..3531212
--- /dev/null
+++ b/setuptools/tests/server.py
@@ -0,0 +1,72 @@
+"""Basic http server for tests to simulate PyPI or custom indexes
+"""
+
+import time
+import threading
+
+from setuptools.extern.six.moves import BaseHTTPServer, SimpleHTTPServer
+
+
+class IndexServer(BaseHTTPServer.HTTPServer):
+ """Basic single-threaded http server simulating a package index
+
+ You can use this server in unittest like this::
+ s = IndexServer()
+ s.start()
+ index_url = s.base_url() + 'mytestindex'
+ # do some test requests to the index
+ # The index files should be located in setuptools/tests/indexes
+ s.stop()
+ """
+
+ def __init__(self, server_address=('', 0),
+ RequestHandlerClass=SimpleHTTPServer.SimpleHTTPRequestHandler):
+ BaseHTTPServer.HTTPServer.__init__(self, server_address,
+ RequestHandlerClass)
+ self._run = True
+
+ def start(self):
+ self.thread = threading.Thread(target=self.serve_forever)
+ self.thread.start()
+
+ def stop(self):
+ "Stop the server"
+
+ # Let the server finish the last request and wait for a new one.
+ time.sleep(0.1)
+
+ self.shutdown()
+ self.thread.join()
+ self.socket.close()
+
+ def base_url(self):
+ port = self.server_port
+ return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
+
+
+class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler):
+ def do_GET(self):
+ requests = vars(self.server).setdefault('requests', [])
+ requests.append(self)
+ self.send_response(200, 'OK')
+
+
+class MockServer(BaseHTTPServer.HTTPServer, threading.Thread):
+ """
+ A simple HTTP Server that records the requests made to it.
+ """
+
+ def __init__(self, server_address=('', 0),
+ RequestHandlerClass=RequestRecorder):
+ BaseHTTPServer.HTTPServer.__init__(self, server_address,
+ RequestHandlerClass)
+ threading.Thread.__init__(self)
+ self.setDaemon(True)
+ self.requests = []
+
+ def run(self):
+ self.serve_forever()
+
+ @property
+ def url(self):
+ return 'http://localhost:%(server_port)s/' % vars(self)
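+
+
+# Illustrative usage sketch (editor's example):
+#   server = MockServer()
+#   server.start()    # daemon thread; serves until the process exits
+#   ...               # issue requests against server.url
+#   server.requests   # handler instances recorded by RequestRecorder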
diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py
new file mode 100644
index 0000000..b789e9a
--- /dev/null
+++ b/setuptools/tests/test_archive_util.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+
+import tarfile
+import io
+
+from setuptools.extern import six
+
+import pytest
+
+from setuptools import archive_util
+
+
+@pytest.fixture
+def tarfile_with_unicode(tmpdir):
+ """
+    Create a tarfile containing a single zero-byte file
+    named testimäge.png.
+ """
+ tarobj = io.BytesIO()
+
+ with tarfile.open(fileobj=tarobj, mode="w:gz") as tgz:
+ data = b""
+
+ filename = "testimäge.png"
+ if six.PY2:
+ filename = filename.decode('utf-8')
+
+ t = tarfile.TarInfo(filename)
+ t.size = len(data)
+
+ tgz.addfile(t, io.BytesIO(data))
+
+ target = tmpdir / 'unicode-pkg-1.0.tar.gz'
+ with open(str(target), mode='wb') as tf:
+ tf.write(tarobj.getvalue())
+ return str(target)
+
+
+@pytest.mark.xfail(reason="#710 and #712")
+def test_unicode_files(tarfile_with_unicode, tmpdir):
+ target = tmpdir / 'out'
+ archive_util.unpack_archive(tarfile_with_unicode, six.text_type(target))
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py
new file mode 100644
index 0000000..54742aa
--- /dev/null
+++ b/setuptools/tests/test_bdist_egg.py
@@ -0,0 +1,66 @@
+"""develop tests
+"""
+import os
+import re
+import zipfile
+
+import pytest
+
+from setuptools.dist import Distribution
+
+from . import contexts
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo', py_modules=['hi'])
+"""
+
+
+@pytest.fixture(scope='function')
+def setup_context(tmpdir):
+ with (tmpdir / 'setup.py').open('w') as f:
+ f.write(SETUP_PY)
+ with (tmpdir / 'hi.py').open('w') as f:
+ f.write('1\n')
+ with tmpdir.as_cwd():
+ yield tmpdir
+
+
+class Test:
+ def test_bdist_egg(self, setup_context, user_override):
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['bdist_egg'],
+ name='foo',
+ py_modules=['hi'],
+ ))
+ os.makedirs(os.path.join('build', 'src'))
+ with contexts.quiet():
+ dist.parse_command_line()
+ dist.run_commands()
+
+        # let's see if we got our egg in the right place
+ [content] = os.listdir('dist')
+ assert re.match(r'foo-0.0.0-py[23].\d.egg$', content)
+
+ @pytest.mark.xfail(
+ os.environ.get('PYTHONDONTWRITEBYTECODE'),
+ reason="Byte code disabled",
+ )
+ def test_exclude_source_files(self, setup_context, user_override):
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['bdist_egg', '--exclude-source-files'],
+ name='foo',
+ py_modules=['hi'],
+ ))
+ with contexts.quiet():
+ dist.parse_command_line()
+ dist.run_commands()
+ [dist_name] = os.listdir('dist')
+ dist_filename = os.path.join('dist', dist_name)
+        zip_file = zipfile.ZipFile(dist_filename)
+        names = [zi.filename for zi in zip_file.filelist]
+ assert 'hi.pyc' in names
+ assert 'hi.py' not in names
diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py
new file mode 100644
index 0000000..aebcc35
--- /dev/null
+++ b/setuptools/tests/test_build_clib.py
@@ -0,0 +1,59 @@
+import pytest
+import os
+import shutil
+
+import mock
+from distutils.errors import DistutilsSetupError
+from setuptools.command.build_clib import build_clib
+from setuptools.dist import Distribution
+
+
+class TestBuildCLib:
+ @mock.patch(
+ 'setuptools.command.build_clib.newer_pairwise_group'
+ )
+ def test_build_libraries(self, mock_newer):
+ dist = Distribution()
+ cmd = build_clib(dist)
+
+        # feed a series of malformed library specs; each one
+        # must raise DistutilsSetupError
+ libs = [('example', {'sources': 'broken.c'})]
+ with pytest.raises(DistutilsSetupError):
+ cmd.build_libraries(libs)
+
+ obj_deps = 'some_string'
+ libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
+ with pytest.raises(DistutilsSetupError):
+ cmd.build_libraries(libs)
+
+ obj_deps = {'': ''}
+ libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
+ with pytest.raises(DistutilsSetupError):
+ cmd.build_libraries(libs)
+
+ obj_deps = {'source.c': ''}
+ libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
+ with pytest.raises(DistutilsSetupError):
+ cmd.build_libraries(libs)
+
+ # with that out of the way, let's see if the crude dependency
+ # system works
+ cmd.compiler = mock.MagicMock(spec=cmd.compiler)
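+        # ([], []) from newer_pairwise_group means nothing is out of date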
+        mock_newer.return_value = ([], [])
+
+ obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
+        libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]
+
+ cmd.build_libraries(libs)
+ assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
+ assert not cmd.compiler.compile.called
+ assert cmd.compiler.create_static_lib.call_count == 1
+
+ # reset the call numbers so we can test again
+ cmd.compiler.reset_mock()
+
+ mock_newer.return_value = '' # anything as long as it's not ([],[])
+ cmd.build_libraries(libs)
+ assert cmd.compiler.compile.call_count == 1
+ assert cmd.compiler.create_static_lib.call_count == 1
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
new file mode 100644
index 0000000..6025715
--- /dev/null
+++ b/setuptools/tests/test_build_ext.py
@@ -0,0 +1,45 @@
+import sys
+import distutils.command.build_ext as orig
+from distutils.sysconfig import get_config_var
+
+from setuptools.extern import six
+
+from setuptools.command.build_ext import build_ext, get_abi3_suffix
+from setuptools.dist import Distribution
+from setuptools.extension import Extension
+
+
+class TestBuildExt:
+ def test_get_ext_filename(self):
+ """
+ Setuptools needs to give back the same
+ result as distutils, even if the fullname
+ is not in ext_map.
+ """
+ dist = Distribution()
+ cmd = build_ext(dist)
+ cmd.ext_map['foo/bar'] = ''
+ res = cmd.get_ext_filename('foo')
+ wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
+ assert res == wanted
+
+ def test_abi3_filename(self):
+ """
+        Filename needs to be loadable by several versions
+        of Python 3 if 'py_limited_api' is truthy on Extension()
+ """
+ print(get_abi3_suffix())
+
+ extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True)
+ dist = Distribution(dict(ext_modules=[extension]))
+ cmd = build_ext(dist)
+ cmd.finalize_options()
+ assert 'spam.eggs' in cmd.ext_map
+ res = cmd.get_ext_filename('spam.eggs')
+
+ if six.PY2 or not get_abi3_suffix():
+ assert res.endswith(get_config_var('SO'))
+ elif sys.platform == 'win32':
+ assert res.endswith('eggs.pyd')
+ else:
+ assert 'abi3' in res
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
new file mode 100644
index 0000000..659c1a6
--- /dev/null
+++ b/setuptools/tests/test_build_meta.py
@@ -0,0 +1,126 @@
+import os
+
+import pytest
+
+from .files import build_files
+from .textwrap import DALS
+
+
+futures = pytest.importorskip('concurrent.futures')
+importlib = pytest.importorskip('importlib')
+
+
+class BuildBackendBase(object):
+    def __init__(self, cwd=None, env=None, backend_name='setuptools.build_meta'):
+        self.cwd = cwd
+        self.env = env or {}
+        self.backend_name = backend_name
+
+
+class BuildBackend(BuildBackendBase):
+ """PEP 517 Build Backend"""
+ def __init__(self, *args, **kwargs):
+ super(BuildBackend, self).__init__(*args, **kwargs)
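+        # run hook calls in a subprocess pool so chdir/environ changes stay isolated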
+ self.pool = futures.ProcessPoolExecutor()
+
+ def __getattr__(self, name):
+ """Handles aribrary function invocations on the build backend."""
+ def method(*args, **kw):
+ root = os.path.abspath(self.cwd)
+ caller = BuildBackendCaller(root, self.env, self.backend_name)
+ return self.pool.submit(caller, name, *args, **kw).result()
+
+ return method
+
+
+class BuildBackendCaller(BuildBackendBase):
+ def __call__(self, name, *args, **kw):
+ """Handles aribrary function invocations on the build backend."""
+ os.chdir(self.cwd)
+ os.environ.update(self.env)
+ mod = importlib.import_module(self.backend_name)
+ return getattr(mod, name)(*args, **kw)
+
+
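+# Three setup.py variants: a bare setup() call, one asserting it runs as
+# __main__, and one relying on module-level globals referenced by a function.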
+defns = [{
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ },
+ {
+ 'setup.py': DALS("""
+ assert __name__ == '__main__'
+ __import__('setuptools').setup(
+ name='foo',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ },
+ {
+ 'setup.py': DALS("""
+ variable = True
+ def function():
+ return variable
+ assert variable
+ __import__('setuptools').setup(
+ name='foo',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ }]
+
+
+@pytest.fixture(params=defns)
+def build_backend(tmpdir, request):
+ build_files(request.param, prefix=str(tmpdir))
+ with tmpdir.as_cwd():
+ yield BuildBackend(cwd='.')
+
+
+def test_get_requires_for_build_wheel(build_backend):
+ actual = build_backend.get_requires_for_build_wheel()
+ expected = ['six', 'setuptools', 'wheel']
+ assert sorted(actual) == sorted(expected)
+
+
+def test_build_wheel(build_backend):
+ dist_dir = os.path.abspath('pip-wheel')
+ os.makedirs(dist_dir)
+ wheel_name = build_backend.build_wheel(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, wheel_name))
+
+
+def test_build_sdist(build_backend):
+ dist_dir = os.path.abspath('pip-sdist')
+ os.makedirs(dist_dir)
+ sdist_name = build_backend.build_sdist(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+
+
+def test_prepare_metadata_for_build_wheel(build_backend):
+ dist_dir = os.path.abspath('pip-dist-info')
+ os.makedirs(dist_dir)
+
+ dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
new file mode 100644
index 0000000..cc701ae
--- /dev/null
+++ b/setuptools/tests/test_build_py.py
@@ -0,0 +1,30 @@
+import os
+
+import pytest
+
+from setuptools.dist import Distribution
+
+
+@pytest.yield_fixture
+def tmpdir_as_cwd(tmpdir):
+ with tmpdir.as_cwd():
+ yield tmpdir
+
+
+def test_directories_in_package_data_glob(tmpdir_as_cwd):
+ """
+ Directories matching the glob in package_data should
+ not be included in the package data.
+
+ Regression test for #261.
+ """
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['build_py'],
+ packages=[''],
+ name='foo',
+ package_data={'': ['path/*']},
+ ))
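+    # create a directory matching the glob; build_py must not treat it as data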
+ os.makedirs('path/subpath')
+ dist.parse_command_line()
+ dist.run_commands()
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/test_config.py
new file mode 100644
index 0000000..abb953a
--- /dev/null
+++ b/setuptools/tests/test_config.py
@@ -0,0 +1,583 @@
+import contextlib
+import pytest
+from distutils.errors import DistutilsOptionError, DistutilsFileError
+from setuptools.dist import Distribution
+from setuptools.config import ConfigHandler, read_configuration
+
+
+class ErrConfigHandler(ConfigHandler):
+ """Erroneous handler. Fails to implement required methods."""
+
+
+def make_package_dir(name, base_dir):
+ dir_package = base_dir.mkdir(name)
+ init_file = dir_package.join('__init__.py')
+ init_file.write('')
+ return dir_package, init_file
+
+
+def fake_env(tmpdir, setup_cfg, setup_py=None):
+
+ if setup_py is None:
+ setup_py = (
+ 'from setuptools import setup\n'
+ 'setup()\n'
+ )
+
+ tmpdir.join('setup.py').write(setup_py)
+ config = tmpdir.join('setup.cfg')
+ config.write(setup_cfg)
+
+ package_dir, init_file = make_package_dir('fake_package', tmpdir)
+
+ init_file.write(
+ 'VERSION = (1, 2, 3)\n'
+ '\n'
+        'VERSION_MAJOR = 1\n'
+ 'def get_version():\n'
+ ' return [3, 4, 5, "dev"]\n'
+ '\n'
+ )
+ return package_dir, config
+
+
+@contextlib.contextmanager
+def get_dist(tmpdir, kwargs_initial=None, parse=True):
+ kwargs_initial = kwargs_initial or {}
+
+ with tmpdir.as_cwd():
+ dist = Distribution(kwargs_initial)
+ dist.script_name = 'setup.py'
+        if parse:
+            dist.parse_config_files()
+
+ yield dist
+
+
+def test_parsers_implemented():
+
+ with pytest.raises(NotImplementedError):
+ handler = ErrConfigHandler(None, {})
+ handler.parsers
+
+
+class TestConfigurationReader:
+
+ def test_basic(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = 10.1.1\n'
+ 'keywords = one, two\n'
+ '\n'
+ '[options]\n'
+ 'scripts = bin/a.py, bin/b.py\n'
+ )
+ config_dict = read_configuration('%s' % config)
+ assert config_dict['metadata']['version'] == '10.1.1'
+ assert config_dict['metadata']['keywords'] == ['one', 'two']
+ assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
+
+ def test_no_config(self, tmpdir):
+ with pytest.raises(DistutilsFileError):
+ read_configuration('%s' % tmpdir.join('setup.cfg'))
+
+ def test_ignore_errors(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = attr: none.VERSION\n'
+ 'keywords = one, two\n'
+ )
+ with pytest.raises(ImportError):
+ read_configuration('%s' % config)
+
+ config_dict = read_configuration(
+ '%s' % config, ignore_option_errors=True)
+
+ assert config_dict['metadata']['keywords'] == ['one', 'two']
+ assert 'version' not in config_dict['metadata']
+
+ config.remove()
+
+
+class TestMetadata:
+
+ def test_basic(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = 10.1.1\n'
+ 'description = Some description\n'
+ 'long_description_content_type = text/something\n'
+ 'long_description = file: README\n'
+ 'name = fake_name\n'
+ 'keywords = one, two\n'
+ 'provides = package, package.sub\n'
+ 'license = otherlic\n'
+ 'download_url = http://test.test.com/test/\n'
+ 'maintainer_email = test@test.com\n'
+ )
+
+ tmpdir.join('README').write('readme contents\nline2')
+
+ meta_initial = {
+ # This will be used so `otherlic` won't replace it.
+ 'license': 'BSD 3-Clause License',
+ }
+
+ with get_dist(tmpdir, meta_initial) as dist:
+ metadata = dist.metadata
+
+ assert metadata.version == '10.1.1'
+ assert metadata.description == 'Some description'
+ assert metadata.long_description_content_type == 'text/something'
+ assert metadata.long_description == 'readme contents\nline2'
+ assert metadata.provides == ['package', 'package.sub']
+ assert metadata.license == 'BSD 3-Clause License'
+ assert metadata.name == 'fake_name'
+ assert metadata.keywords == ['one', 'two']
+ assert metadata.download_url == 'http://test.test.com/test/'
+ assert metadata.maintainer_email == 'test@test.com'
+
+ def test_file_mixed(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'long_description = file: README.rst, CHANGES.rst\n'
+ '\n'
+ )
+
+ tmpdir.join('README.rst').write('readme contents\nline2')
+ tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.long_description == (
+ 'readme contents\nline2\n'
+ 'changelog contents\nand stuff'
+ )
+
+ def test_file_sandboxed(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'long_description = file: ../../README\n'
+ )
+
+ with get_dist(tmpdir, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files() # file: out of sandbox
+
+ def test_aliases(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'author-email = test@test.com\n'
+ 'home-page = http://test.test.com/test/\n'
+ 'summary = Short summary\n'
+ 'platform = a, b\n'
+ 'classifier =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3.5\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.author_email == 'test@test.com'
+ assert metadata.url == 'http://test.test.com/test/'
+ assert metadata.description == 'Short summary'
+ assert metadata.platforms == ['a', 'b']
+ assert metadata.classifiers == [
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3.5',
+ ]
+
+ def test_multiline(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'name = fake_name\n'
+ 'keywords =\n'
+ ' one\n'
+ ' two\n'
+ 'classifiers =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3.5\n'
+ )
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.keywords == ['one', 'two']
+ assert metadata.classifiers == [
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3.5',
+ ]
+
+ def test_dict(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'project_urls =\n'
+ ' Link One = https://example.com/one/\n'
+ ' Link Two = https://example.com/two/\n'
+ )
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.project_urls == {
+ 'Link One': 'https://example.com/one/',
+ 'Link Two': 'https://example.com/two/',
+ }
+
+ def test_version(self, tmpdir):
+
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = attr: fake_package.VERSION\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ config.write(
+ '[metadata]\n'
+ 'version = attr: fake_package.get_version\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '3.4.5.dev'
+
+ config.write(
+ '[metadata]\n'
+ 'version = attr: fake_package.VERSION_MAJOR\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1'
+
+ subpack = tmpdir.join('fake_package').mkdir('subpackage')
+ subpack.join('__init__.py').write('')
+ subpack.join('submodule.py').write('VERSION = (2016, 11, 26)')
+
+ config.write(
+ '[metadata]\n'
+ 'version = attr: fake_package.subpackage.submodule.VERSION\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '2016.11.26'
+
+ def test_unknown_meta_item(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'name = fake_name\n'
+ 'unknown = some\n'
+ )
+ with get_dist(tmpdir, parse=False) as dist:
+ dist.parse_config_files() # Skip unknown.
+
+    def test_unsupported_section(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata.some]\n'
+ 'key = val\n'
+ )
+ with get_dist(tmpdir, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files()
+
+ def test_classifiers(self, tmpdir):
+ expected = set([
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ ])
+
+ # From file.
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'classifiers = file: classifiers\n'
+ )
+
+ tmpdir.join('classifiers').write(
+ 'Framework :: Django\n'
+ 'Programming Language :: Python :: 3\n'
+ 'Programming Language :: Python :: 3.5\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert set(dist.metadata.classifiers) == expected
+
+ # From list notation
+ config.write(
+ '[metadata]\n'
+ 'classifiers =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3\n'
+ ' Programming Language :: Python :: 3.5\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert set(dist.metadata.classifiers) == expected
+
+
+class TestOptions:
+
+ def test_basic(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'zip_safe = True\n'
+ 'use_2to3 = 1\n'
+ 'include_package_data = yes\n'
+ 'package_dir = b=c, =src\n'
+ 'packages = pack_a, pack_b.subpack\n'
+ 'namespace_packages = pack1, pack2\n'
+ 'use_2to3_fixers = your.fixers, or.here\n'
+ 'use_2to3_exclude_fixers = one.here, two.there\n'
+ 'convert_2to3_doctests = src/tests/one.txt, src/two.txt\n'
+ 'scripts = bin/one.py, bin/two.py\n'
+ 'eager_resources = bin/one.py, bin/two.py\n'
+ 'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
+ 'tests_require = mock==0.7.2; pytest\n'
+ 'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
+ 'dependency_links = http://some.com/here/1, '
+ 'http://some.com/there/2\n'
+ 'python_requires = >=1.0, !=2.8\n'
+ 'py_modules = module1, module2\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.zip_safe
+ assert dist.use_2to3
+ assert dist.include_package_data
+ assert dist.package_dir == {'': 'src', 'b': 'c'}
+ assert dist.packages == ['pack_a', 'pack_b.subpack']
+ assert dist.namespace_packages == ['pack1', 'pack2']
+ assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
+ assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
+ assert dist.convert_2to3_doctests == ([
+ 'src/tests/one.txt', 'src/two.txt'])
+ assert dist.scripts == ['bin/one.py', 'bin/two.py']
+ assert dist.dependency_links == ([
+ 'http://some.com/here/1',
+ 'http://some.com/there/2'
+ ])
+ assert dist.install_requires == ([
+ 'docutils>=0.3',
+ 'pack==1.1,==1.3',
+ 'hey'
+ ])
+ assert dist.setup_requires == ([
+ 'docutils>=0.3',
+ 'spack ==1.1, ==1.3',
+ 'there'
+ ])
+ assert dist.tests_require == ['mock==0.7.2', 'pytest']
+ assert dist.python_requires == '>=1.0, !=2.8'
+ assert dist.py_modules == ['module1', 'module2']
+
+ def test_multiline(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'package_dir = \n'
+ ' b=c\n'
+ ' =src\n'
+ 'packages = \n'
+ ' pack_a\n'
+ ' pack_b.subpack\n'
+ 'namespace_packages = \n'
+ ' pack1\n'
+ ' pack2\n'
+ 'use_2to3_fixers = \n'
+ ' your.fixers\n'
+ ' or.here\n'
+ 'use_2to3_exclude_fixers = \n'
+ ' one.here\n'
+ ' two.there\n'
+ 'convert_2to3_doctests = \n'
+ ' src/tests/one.txt\n'
+ ' src/two.txt\n'
+ 'scripts = \n'
+ ' bin/one.py\n'
+ ' bin/two.py\n'
+ 'eager_resources = \n'
+ ' bin/one.py\n'
+ ' bin/two.py\n'
+ 'install_requires = \n'
+ ' docutils>=0.3\n'
+ ' pack ==1.1, ==1.3\n'
+ ' hey\n'
+ 'tests_require = \n'
+ ' mock==0.7.2\n'
+ ' pytest\n'
+ 'setup_requires = \n'
+ ' docutils>=0.3\n'
+ ' spack ==1.1, ==1.3\n'
+ ' there\n'
+ 'dependency_links = \n'
+ ' http://some.com/here/1\n'
+ ' http://some.com/there/2\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.package_dir == {'': 'src', 'b': 'c'}
+ assert dist.packages == ['pack_a', 'pack_b.subpack']
+ assert dist.namespace_packages == ['pack1', 'pack2']
+ assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
+ assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
+ assert dist.convert_2to3_doctests == (
+ ['src/tests/one.txt', 'src/two.txt'])
+ assert dist.scripts == ['bin/one.py', 'bin/two.py']
+ assert dist.dependency_links == ([
+ 'http://some.com/here/1',
+ 'http://some.com/there/2'
+ ])
+ assert dist.install_requires == ([
+ 'docutils>=0.3',
+ 'pack==1.1,==1.3',
+ 'hey'
+ ])
+ assert dist.setup_requires == ([
+ 'docutils>=0.3',
+ 'spack ==1.1, ==1.3',
+ 'there'
+ ])
+ assert dist.tests_require == ['mock==0.7.2', 'pytest']
+
+ def test_package_dir_fail(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'package_dir = a b\n'
+ )
+ with get_dist(tmpdir, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files()
+
+ def test_package_data(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.package_data]\n'
+ '* = *.txt, *.rst\n'
+ 'hello = *.msg\n'
+ '\n'
+ '[options.exclude_package_data]\n'
+ '* = fake1.txt, fake2.txt\n'
+ 'hello = *.dat\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.package_data == {
+ '': ['*.txt', '*.rst'],
+ 'hello': ['*.msg'],
+ }
+ assert dist.exclude_package_data == {
+ '': ['fake1.txt', 'fake2.txt'],
+ 'hello': ['*.dat'],
+ }
+
+ def test_packages(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'packages = find:\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.packages == ['fake_package']
+
+ def test_find_directive(self, tmpdir):
+ dir_package, config = fake_env(
+ tmpdir,
+ '[options]\n'
+ 'packages = find:\n'
+ )
+
+ dir_sub_one, _ = make_package_dir('sub_one', dir_package)
+ dir_sub_two, _ = make_package_dir('sub_two', dir_package)
+
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == set([
+ 'fake_package', 'fake_package.sub_two', 'fake_package.sub_one'
+ ])
+
+ config.write(
+ '[options]\n'
+ 'packages = find:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'where = .\n'
+ 'include =\n'
+ ' fake_package.sub_one\n'
+ ' two\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.packages == ['fake_package.sub_one']
+
+ config.write(
+ '[options]\n'
+ 'packages = find:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'exclude =\n'
+ ' fake_package.sub_one\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == set(
+ ['fake_package', 'fake_package.sub_two'])
+
+ def test_extras_require(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.extras_require]\n'
+ 'pdf = ReportLab>=1.2; RXP\n'
+ 'rest = \n'
+ ' docutils>=0.3\n'
+ ' pack ==1.1, ==1.3\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.extras_require == {
+ 'pdf': ['ReportLab>=1.2', 'RXP'],
+ 'rest': ['docutils>=0.3', 'pack==1.1,==1.3']
+ }
+ assert dist.metadata.provides_extras == set(['pdf', 'rest'])
+
+ def test_entry_points(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[options.entry_points]\n'
+ 'group1 = point1 = pack.module:func, '
+ '.point2 = pack.module2:func_rest [rest]\n'
+ 'group2 = point3 = pack.module:func2\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.entry_points == {
+ 'group1': [
+ 'point1 = pack.module:func',
+ '.point2 = pack.module2:func_rest [rest]',
+ ],
+ 'group2': ['point3 = pack.module:func2']
+ }
+
+ expected = (
+ '[blogtool.parsers]\n'
+ '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
+ )
+
+ tmpdir.join('entry_points').write(expected)
+
+ # From file.
+ config.write(
+ '[options]\n'
+ 'entry_points = file: entry_points\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.entry_points == expected
diff --git a/setuptools/tests/test_dep_util.py b/setuptools/tests/test_dep_util.py
new file mode 100644
index 0000000..e5027c1
--- /dev/null
+++ b/setuptools/tests/test_dep_util.py
@@ -0,0 +1,30 @@
+from setuptools.dep_util import newer_pairwise_group
+import os
+import pytest
+
+
+@pytest.fixture
+def groups_target(tmpdir):
+ """Sets up some older sources, a target and newer sources.
+ Returns a 3-tuple in this order.
+ """
+ creation_order = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h']
+ mtime = 0
+
+ for i in range(len(creation_order)):
+ creation_order[i] = os.path.join(str(tmpdir), creation_order[i])
+ with open(creation_order[i], 'w'):
+ pass
+
+ # make sure modification times are sequential
+ os.utime(creation_order[i], (mtime, mtime))
+ mtime += 1
+
+ return creation_order[:2], creation_order[2], creation_order[3:]
+
+
+def test_newer_pairwise_group(groups_target):
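+    # newer_pairwise_group returns the (sources, targets) pairs that need rebuilding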
+ older = newer_pairwise_group([groups_target[0]], [groups_target[1]])
+ newer = newer_pairwise_group([groups_target[2]], [groups_target[1]])
+ assert older == ([], [])
+ assert newer == ([groups_target[2]], [groups_target[1]])
diff --git a/setuptools/tests/test_depends.py b/setuptools/tests/test_depends.py
new file mode 100644
index 0000000..e0cfa88
--- /dev/null
+++ b/setuptools/tests/test_depends.py
@@ -0,0 +1,16 @@
+import sys
+
+from setuptools import depends
+
+
+class TestGetModuleConstant:
+
+ def test_basic(self):
+ """
+ Invoke get_module_constant on a module in
+ the test package.
+ """
+ mod_name = 'setuptools.tests.mod_with_constant'
+ val = depends.get_module_constant(mod_name, 'value')
+ assert val == 'three, sir!'
+ assert 'setuptools.tests.mod_with_constant' not in sys.modules
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
new file mode 100644
index 0000000..00d4bd9
--- /dev/null
+++ b/setuptools/tests/test_develop.py
@@ -0,0 +1,202 @@
+"""develop tests
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+import site
+import sys
+import io
+import subprocess
+import platform
+
+from setuptools.extern import six
+from setuptools.command import test
+
+import pytest
+
+from setuptools.command.develop import develop
+from setuptools.dist import Distribution
+from . import contexts
+from . import namespaces
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo',
+ packages=['foo'],
+ use_2to3=True,
+)
+"""
+
+INIT_PY = """print "foo"
+"""
+
+
+@pytest.yield_fixture
+def temp_user(monkeypatch):
+ with contexts.tempdir() as user_base:
+ with contexts.tempdir() as user_site:
+ monkeypatch.setattr('site.USER_BASE', user_base)
+ monkeypatch.setattr('site.USER_SITE', user_site)
+ yield
+
+
+@pytest.yield_fixture
+def test_env(tmpdir, temp_user):
+ target = tmpdir
+ foo = target.mkdir('foo')
+ setup = target / 'setup.py'
+ if setup.isfile():
+ raise ValueError(dir(target))
+ with setup.open('w') as f:
+ f.write(SETUP_PY)
+ init = foo / '__init__.py'
+ with init.open('w') as f:
+ f.write(INIT_PY)
+ with target.as_cwd():
+ yield target
+
+
+class TestDevelop:
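+    # virtualenv exposes sys.real_prefix; a PEP 405 venv makes base_prefix differ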
+ in_virtualenv = hasattr(sys, 'real_prefix')
+ in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
+
+ @pytest.mark.skipif(
+ in_virtualenv or in_venv,
+ reason="Cannot run when invoked in a virtualenv or venv")
+ def test_2to3_user_mode(self, test_env):
+ settings = dict(
+ name='foo',
+ packages=['foo'],
+ use_2to3=True,
+ version='0.0',
+ )
+ dist = Distribution(settings)
+ dist.script_name = 'setup.py'
+ cmd = develop(dist)
+ cmd.user = 1
+ cmd.ensure_finalized()
+ cmd.install_dir = site.USER_SITE
+ cmd.user = 1
+ with contexts.quiet():
+ cmd.run()
+
+ # let's see if we got our egg link at the right place
+ content = os.listdir(site.USER_SITE)
+ content.sort()
+ assert content == ['easy-install.pth', 'foo.egg-link']
+
+ # Check that we are using the right code.
+ fn = os.path.join(site.USER_SITE, 'foo.egg-link')
+ with io.open(fn) as egg_link_file:
+ path = egg_link_file.read().split()[0].strip()
+ fn = os.path.join(path, 'foo', '__init__.py')
+ with io.open(fn) as init_file:
+ init = init_file.read().strip()
+
+ expected = 'print("foo")' if six.PY3 else 'print "foo"'
+ assert init == expected
+
+ def test_console_scripts(self, tmpdir):
+ """
+ Test that console scripts are installed and that they reference
+ only the project by name and not the current version.
+ """
+ pytest.skip(
+ "TODO: needs a fixture to cause 'develop' "
+ "to be invoked without mutating environment.")
+ settings = dict(
+ name='foo',
+ packages=['foo'],
+ version='0.0',
+ entry_points={
+ 'console_scripts': [
+ 'foocmd = foo:foo',
+ ],
+ },
+ )
+ dist = Distribution(settings)
+ dist.script_name = 'setup.py'
+ cmd = develop(dist)
+ cmd.ensure_finalized()
+ cmd.install_dir = tmpdir
+ cmd.run()
+ # assert '0.0' not in foocmd_text
+
+
+class TestResolver:
+ """
+ TODO: These tests were written with a minimal understanding
+ of what _resolve_setup_path is intending to do. Come up with
+ more meaningful cases that look like real-world scenarios.
+ """
+ def test_resolve_setup_path_cwd(self):
+ assert develop._resolve_setup_path('.', '.', '.') == '.'
+
+ def test_resolve_setup_path_one_dir(self):
+ assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'
+
+ def test_resolve_setup_path_one_dir_trailing_slash(self):
+ assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
+
+
+class TestNamespaces:
+
+ @staticmethod
+ def install_develop(src_dir, target):
+
+ develop_cmd = [
+ sys.executable,
+ 'setup.py',
+ 'develop',
+ '--install-dir', str(target),
+ ]
+ with src_dir.as_cwd():
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(develop_cmd)
+
+ @pytest.mark.skipif(
+ bool(os.environ.get("APPVEYOR")),
+ reason="https://github.com/pypa/setuptools/issues/851",
+ )
+ @pytest.mark.skipif(
+ platform.python_implementation() == 'PyPy' and six.PY3,
+ reason="https://github.com/pypa/setuptools/issues/1202",
+ )
+ def test_namespace_package_importable(self, tmpdir):
+ """
+ Installing two packages sharing the same namespace, one installed
+ naturally using pip or `--single-version-externally-managed`
+ and the other installed using `develop` should leave the namespace
+        intact and both packages reachable by import.
+ """
+ pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+ pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+ target = tmpdir / 'packages'
+ # use pip to install to the target directory
+ install_cmd = [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'install',
+ str(pkg_A),
+ '-t', str(target),
+ ]
+ subprocess.check_call(install_cmd)
+ self.install_develop(pkg_B, target)
+ namespaces.make_site_dir(target)
+ try_import = [
+ sys.executable,
+ '-c', 'import myns.pkgA; import myns.pkgB',
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(try_import)
+
+ # additionally ensure that pkg_resources import works
+ pkg_resources_imp = [
+ sys.executable,
+ '-c', 'import pkg_resources',
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(pkg_resources_imp)
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
new file mode 100644
index 0000000..5162e1c
--- /dev/null
+++ b/setuptools/tests/test_dist.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import unicode_literals
+
+import io
+
+from setuptools import Distribution
+from setuptools.extern.six.moves.urllib.request import pathname2url
+from setuptools.extern.six.moves.urllib_parse import urljoin
+
+from .textwrap import DALS
+from .test_easy_install import make_nspkg_sdist
+
+import pytest
+
+
+def test_dist_fetch_build_egg(tmpdir):
+ """
+ Check multiple calls to `Distribution.fetch_build_egg` work as expected.
+ """
+ index = tmpdir.mkdir('index')
+ index_url = urljoin('file://', pathname2url(str(index)))
+
+ def sdist_with_index(distname, version):
+ dist_dir = index.mkdir(distname)
+ dist_sdist = '%s-%s.tar.gz' % (distname, version)
+ make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
+ with dist_dir.join('index.html').open('w') as fp:
+ fp.write(DALS(
+ '''
+ <!DOCTYPE html><html><body>
+ <a href="{dist_sdist}" rel="internal">{dist_sdist}</a><br/>
+ </body></html>
+ '''
+ ).format(dist_sdist=dist_sdist))
+ sdist_with_index('barbazquux', '3.2.0')
+ sdist_with_index('barbazquux-runner', '2.11.1')
+ with tmpdir.join('setup.cfg').open('w') as fp:
+ fp.write(DALS(
+ '''
+ [easy_install]
+ index_url = {index_url}
+ '''
+ ).format(index_url=index_url))
+ reqs = '''
+ barbazquux-runner
+ barbazquux
+ '''.split()
+ with tmpdir.as_cwd():
+ dist = Distribution()
+ dist.parse_config_files()
+ resolved_dists = [
+ dist.fetch_build_egg(r)
+ for r in reqs
+ ]
+ assert [dist.key for dist in resolved_dists if dist] == reqs
+
+
+def __maintainer_test_cases():
+ attrs = {"name": "package",
+ "version": "1.0",
+ "description": "xxx"}
+
+ def merge_dicts(d1, d2):
+ d1 = d1.copy()
+ d1.update(d2)
+
+ return d1
+
+ test_cases = [
+ ('No author, no maintainer', attrs.copy()),
+ ('Author (no e-mail), no maintainer', merge_dicts(
+ attrs,
+ {'author': 'Author Name'})),
+ ('Author (e-mail), no maintainer', merge_dicts(
+ attrs,
+ {'author': 'Author Name',
+ 'author_email': 'author@name.com'})),
+ ('No author, maintainer (no e-mail)', merge_dicts(
+ attrs,
+ {'maintainer': 'Maintainer Name'})),
+ ('No author, maintainer (e-mail)', merge_dicts(
+ attrs,
+ {'maintainer': 'Maintainer Name',
+ 'maintainer_email': 'maintainer@name.com'})),
+ ('Author (no e-mail), Maintainer (no-email)', merge_dicts(
+ attrs,
+ {'author': 'Author Name',
+ 'maintainer': 'Maintainer Name'})),
+ ('Author (e-mail), Maintainer (e-mail)', merge_dicts(
+ attrs,
+ {'author': 'Author Name',
+ 'author_email': 'author@name.com',
+ 'maintainer': 'Maintainer Name',
+ 'maintainer_email': 'maintainer@name.com'})),
+ ('No author (e-mail), no maintainer (e-mail)', merge_dicts(
+ attrs,
+ {'author_email': 'author@name.com',
+ 'maintainer_email': 'maintainer@name.com'})),
+ ('Author unicode', merge_dicts(
+ attrs,
+ {'author': '鉄沢寛'})),
+ ('Maintainer unicode', merge_dicts(
+ attrs,
+ {'maintainer': 'Jan Łukasiewicz'})),
+ ]
+
+ return test_cases
+
+
+@pytest.mark.parametrize('name,attrs', __maintainer_test_cases())
+def test_maintainer_author(name, attrs, tmpdir):
+ tested_keys = {
+ 'author': 'Author',
+ 'author_email': 'Author-email',
+ 'maintainer': 'Maintainer',
+ 'maintainer_email': 'Maintainer-email',
+ }
+
+ # Generate a PKG-INFO file
+ dist = Distribution(attrs)
+ fn = tmpdir.mkdir('pkg_info')
+ fn_s = str(fn)
+
+ dist.metadata.write_pkg_info(fn_s)
+
+ with io.open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
+ raw_pkg_lines = f.readlines()
+
+ # Drop blank lines
+ pkg_lines = list(filter(None, raw_pkg_lines))
+
+ pkg_lines_set = set(pkg_lines)
+
+ # Duplicate lines should not be generated
+ assert len(pkg_lines) == len(pkg_lines_set)
+
+ for fkey, dkey in tested_keys.items():
+ val = attrs.get(dkey, None)
+ if val is None:
+ for line in pkg_lines:
+ assert not line.startswith(fkey + ':')
+ else:
+ line = '%s: %s' % (fkey, val)
+ assert line in pkg_lines_set
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
new file mode 100644
index 0000000..f7e7d2b
--- /dev/null
+++ b/setuptools/tests/test_dist_info.py
@@ -0,0 +1,78 @@
+"""Test .dist-info style distributions.
+"""
+
+from __future__ import unicode_literals
+
+from setuptools.extern.six.moves import map
+
+import pytest
+
+import pkg_resources
+from .textwrap import DALS
+
+
+class TestDistInfo:
+
+ metadata_base = DALS("""
+ Metadata-Version: 1.2
+ Requires-Dist: splort (==4)
+ Provides-Extra: baz
+ Requires-Dist: quux (>=1.1); extra == 'baz'
+ """)
+
+ @classmethod
+ def build_metadata(cls, **kwargs):
+ lines = (
+ '{key}: {value}\n'.format(**locals())
+ for key, value in kwargs.items()
+ )
+ return cls.metadata_base + ''.join(lines)
+
+ @pytest.fixture
+ def metadata(self, tmpdir):
+ dist_info_name = 'VersionedDistribution-2.718.dist-info'
+ versioned = tmpdir / dist_info_name
+ versioned.mkdir()
+ filename = versioned / 'METADATA'
+ content = self.build_metadata(
+ Name='VersionedDistribution',
+ )
+ filename.write_text(content, encoding='utf-8')
+
+ dist_info_name = 'UnversionedDistribution.dist-info'
+ unversioned = tmpdir / dist_info_name
+ unversioned.mkdir()
+ filename = unversioned / 'METADATA'
+ content = self.build_metadata(
+ Name='UnversionedDistribution',
+ Version='0.3',
+ )
+ filename.write_text(content, encoding='utf-8')
+
+ return str(tmpdir)
+
+ def test_distinfo(self, metadata):
+ dists = dict(
+ (d.project_name, d)
+ for d in pkg_resources.find_distributions(metadata)
+ )
+
+ assert len(dists) == 2, dists
+
+ unversioned = dists['UnversionedDistribution']
+ versioned = dists['VersionedDistribution']
+
+ assert versioned.version == '2.718' # from filename
+ assert unversioned.version == '0.3' # from METADATA
+
+ def test_conditional_dependencies(self, metadata):
+ specs = 'splort==4', 'quux>=1.1'
+ requires = list(map(pkg_resources.Requirement.parse, specs))
+
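+        # without the 'baz' extra, only the unconditional requirement applies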
+ for d in pkg_resources.find_distributions(metadata):
+ assert d.requires() == requires[:1]
+ assert d.requires(extras=('baz',)) == [
+ requires[0],
+ pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'),
+ ]
+ assert d.extras == ['baz']
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
new file mode 100644
index 0000000..57339c8
--- /dev/null
+++ b/setuptools/tests/test_easy_install.py
@@ -0,0 +1,760 @@
+# -*- coding: utf-8 -*-
+"""Easy install Tests
+"""
+from __future__ import absolute_import
+
+import sys
+import os
+import tempfile
+import site
+import contextlib
+import tarfile
+import logging
+import itertools
+import distutils.errors
+import io
+import zipfile
+import mock
+
+import time
+from setuptools.extern.six.moves import urllib
+
+import pytest
+
+from setuptools import sandbox
+from setuptools.sandbox import run_setup
+import setuptools.command.easy_install as ei
+from setuptools.command.easy_install import PthDistributions
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+from pkg_resources import normalize_path, working_set
+from pkg_resources import Distribution as PRDistribution
+import setuptools.tests.server
+from setuptools.tests import fail_on_ascii
+import pkg_resources
+
+from . import contexts
+from .textwrap import DALS
+
+
+class FakeDist(object):
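+    """Just enough of a Distribution to drive ScriptWriter.get_args()."""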
+ def get_entry_map(self, group):
+ if group != 'console_scripts':
+ return {}
+ return {'name': 'ep'}
+
+ def as_requirement(self):
+ return 'spec'
+
+
+SETUP_PY = DALS("""
+ from setuptools import setup
+
+ setup(name='foo')
+ """)
+
+
+class TestEasyInstallTest:
+ def test_install_site_py(self, tmpdir):
+ dist = Distribution()
+ cmd = ei.easy_install(dist)
+ cmd.sitepy_installed = False
+ cmd.install_dir = str(tmpdir)
+ cmd.install_site_py()
+ assert (tmpdir / 'site.py').exists()
+
+ def test_get_script_args(self):
+ header = ei.CommandSpec.best().from_environment().as_header()
+ expected = header + DALS(r"""
+ # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
+ __requires__ = 'spec'
+ import re
+ import sys
+ from pkg_resources import load_entry_point
+
+ if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(
+ load_entry_point('spec', 'console_scripts', 'name')()
+ )
+ """)
+ dist = FakeDist()
+
+ args = next(ei.ScriptWriter.get_args(dist))
+ name, script = itertools.islice(args, 2)
+
+ assert script == expected
+
+ def test_no_find_links(self):
+ # new option '--no-find-links', that blocks find-links added at
+ # the project level
+ dist = Distribution()
+ cmd = ei.easy_install(dist)
+ cmd.check_pth_processing = lambda: True
+ cmd.no_find_links = True
+ cmd.find_links = ['link1', 'link2']
+ cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+ cmd.args = ['ok']
+ cmd.ensure_finalized()
+ assert cmd.package_index.scanned_urls == {}
+
+ # let's try without it (default behavior)
+ cmd = ei.easy_install(dist)
+ cmd.check_pth_processing = lambda: True
+ cmd.find_links = ['link1', 'link2']
+ cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+ cmd.args = ['ok']
+ cmd.ensure_finalized()
+ keys = sorted(cmd.package_index.scanned_urls.keys())
+ assert keys == ['link1', 'link2']
+
+ def test_write_exception(self):
+ """
+ Test that `cant_write_to_target` is rendered as a DistutilsError.
+ """
+ dist = Distribution()
+ cmd = ei.easy_install(dist)
+ cmd.install_dir = os.getcwd()
+ with pytest.raises(distutils.errors.DistutilsError):
+ cmd.cant_write_to_target()
+
+ def test_all_site_dirs(self, monkeypatch):
+ """
+ get_site_dirs should always return site dirs reported by
+ site.getsitepackages.
+ """
+ path = normalize_path('/setuptools/test/site-packages')
+ mock_gsp = lambda: [path]
+ monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False)
+ assert path in ei.get_site_dirs()
+
+ def test_all_site_dirs_works_without_getsitepackages(self, monkeypatch):
+ monkeypatch.delattr(site, 'getsitepackages', raising=False)
+ assert ei.get_site_dirs()
+
+ @pytest.fixture
+ def sdist_unicode(self, tmpdir):
+ files = [
+ (
+ 'setup.py',
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name="setuptools-test-unicode",
+ version="1.0",
+ packages=["mypkg"],
+ include_package_data=True,
+ )
+ """),
+ ),
+ (
+ 'mypkg/__init__.py',
+ "",
+ ),
+ (
+ u'mypkg/\u2603.txt',
+ "",
+ ),
+ ]
+ sdist_name = 'setuptools-test-unicode-1.0.zip'
+ sdist = tmpdir / sdist_name
+ # can't use make_sdist, because the issue only occurs
+ # with zip sdists.
+ sdist_zip = zipfile.ZipFile(str(sdist), 'w')
+ for filename, content in files:
+ sdist_zip.writestr(filename, content)
+ sdist_zip.close()
+ return str(sdist)
+
+ @fail_on_ascii
+ def test_unicode_filename_in_sdist(self, sdist_unicode, tmpdir, monkeypatch):
+ """
+ The install command should execute correctly even if
+ the package has unicode filenames.
+ """
+ dist = Distribution({'script_args': ['easy_install']})
+ target = (tmpdir / 'target').ensure_dir()
+ cmd = ei.easy_install(
+ dist,
+ install_dir=str(target),
+ args=['x'],
+ )
+ monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target))
+ cmd.ensure_finalized()
+ cmd.easy_install(sdist_unicode)
+
+ @pytest.fixture
+ def sdist_script(self, tmpdir):
+ files = [
+ (
+ 'setup.py',
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name="setuptools-test-script",
+ version="1.0",
+ scripts=["mypkg_script"],
+ )
+ """),
+ ),
+ (
+ u'mypkg_script',
+ DALS("""
+            #!/usr/bin/python
+ print('mypkg_script')
+ """),
+ ),
+ ]
+ sdist_name = 'setuptools-test-script-1.0.zip'
+ sdist = str(tmpdir / sdist_name)
+ make_sdist(sdist, files)
+ return sdist
+
+ @pytest.mark.skipif(not sys.platform.startswith('linux'),
+ reason="Test can only be run on Linux")
+ def test_script_install(self, sdist_script, tmpdir, monkeypatch):
+ """
+ Check scripts are installed.
+ """
+ dist = Distribution({'script_args': ['easy_install']})
+ target = (tmpdir / 'target').ensure_dir()
+ cmd = ei.easy_install(
+ dist,
+ install_dir=str(target),
+ args=['x'],
+ )
+ monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target))
+ cmd.ensure_finalized()
+ cmd.easy_install(sdist_script)
+ assert (target / 'mypkg_script').exists()
+
+
+class TestPTHFileWriter:
+ def test_add_from_cwd_site_sets_dirty(self):
+ '''a pth file manager should set dirty
+ if a distribution is in site but also the cwd
+ '''
+ pth = PthDistributions('does-not_exist', [os.getcwd()])
+ assert not pth.dirty
+ pth.add(PRDistribution(os.getcwd()))
+ assert pth.dirty
+
+ def test_add_from_site_is_ignored(self):
+ location = '/test/location/does-not-have-to-exist'
+ # PthDistributions expects all locations to be normalized
+ location = pkg_resources.normalize_path(location)
+ pth = PthDistributions('does-not_exist', [location, ])
+ assert not pth.dirty
+ pth.add(PRDistribution(location))
+ assert not pth.dirty
+
+
+@pytest.yield_fixture
+def setup_context(tmpdir):
+ with (tmpdir / 'setup.py').open('w') as f:
+ f.write(SETUP_PY)
+ with tmpdir.as_cwd():
+ yield tmpdir
+
+
+@pytest.mark.usefixtures("user_override")
+@pytest.mark.usefixtures("setup_context")
+class TestUserInstallTest:
+
+ # prevent check that site-packages is writable. easy_install
+ # shouldn't be writing to system site-packages during finalize
+ # options, but while it does, bypass the behavior.
+ prev_sp_write = mock.patch(
+ 'setuptools.command.easy_install.easy_install.check_site_dir',
+ mock.Mock(),
+ )
+
+ # simulate setuptools installed in user site packages
+ @mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE)
+ @mock.patch('site.ENABLE_USER_SITE', True)
+ @prev_sp_write
+ def test_user_install_not_implied_user_site_enabled(self):
+ self.assert_not_user_site()
+
+ @mock.patch('site.ENABLE_USER_SITE', False)
+ @prev_sp_write
+ def test_user_install_not_implied_user_site_disabled(self):
+ self.assert_not_user_site()
+
+ @staticmethod
+ def assert_not_user_site():
+ # create a finalized easy_install command
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = ei.easy_install(dist)
+ cmd.args = ['py']
+ cmd.ensure_finalized()
+ assert not cmd.user, 'user should not be implied'
+
+ def test_multiproc_atexit(self):
+ pytest.importorskip('multiprocessing')
+
+ log = logging.getLogger('test_easy_install')
+ logging.basicConfig(level=logging.INFO, stream=sys.stderr)
+ log.info('this should not break')
+
+ @pytest.fixture()
+ def foo_package(self, tmpdir):
+ egg_file = tmpdir / 'foo-1.0.egg-info'
+ with egg_file.open('w') as f:
+ f.write('Name: foo\n')
+ return str(tmpdir)
+
+ @pytest.yield_fixture()
+ def install_target(self, tmpdir):
+ target = str(tmpdir)
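+        # expose the target dir on sys.path and PYTHONPATH so installs there are importable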
+ with mock.patch('sys.path', sys.path + [target]):
+ python_path = os.path.pathsep.join(sys.path)
+ with mock.patch.dict(os.environ, PYTHONPATH=python_path):
+ yield target
+
+ def test_local_index(self, foo_package, install_target):
+ """
+ The local index must be used when easy_install locates installed
+ packages.
+ """
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = ei.easy_install(dist)
+ cmd.install_dir = install_target
+ cmd.args = ['foo']
+ cmd.ensure_finalized()
+ cmd.local_index.scan([foo_package])
+ res = cmd.easy_install('foo')
+ actual = os.path.normcase(os.path.realpath(res.location))
+ expected = os.path.normcase(os.path.realpath(foo_package))
+ assert actual == expected
+
+ @contextlib.contextmanager
+ def user_install_setup_context(self, *args, **kwargs):
+ """
+ Wrap sandbox.setup_context to patch easy_install in that context to
+ appear as user-installed.
+ """
+ with self.orig_context(*args, **kwargs):
+ import setuptools.command.easy_install as ei
+ ei.__file__ = site.USER_SITE
+ yield
+
+ def patched_setup_context(self):
+ self.orig_context = sandbox.setup_context
+
+ return mock.patch(
+ 'setuptools.sandbox.setup_context',
+ self.user_install_setup_context,
+ )
+
+
+@pytest.yield_fixture
+def distutils_package():
+ distutils_setup_py = SETUP_PY.replace(
+ 'from setuptools import setup',
+ 'from distutils.core import setup',
+ )
+ with contexts.tempdir(cd=os.chdir):
+ with open('setup.py', 'w') as f:
+ f.write(distutils_setup_py)
+ yield
+
+
+class TestDistutilsPackage:
+ def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
+ run_setup('setup.py', ['bdist_egg'])
+
+
+class TestSetupRequires:
+ def test_setup_requires_honors_fetch_params(self):
+ """
+ When easy_install installs a source distribution which specifies
+ setup_requires, it should honor the fetch parameters (such as
+ allow-hosts, index-url, and find-links).
+ """
+ # set up a server which will simulate an alternate package index.
+ p_index = setuptools.tests.server.MockServer()
+ p_index.start()
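+        # urlparse() returns a 6-tuple; index 1 is the netloc ("host:port")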
+ netloc = 1
+ p_index_loc = urllib.parse.urlparse(p_index.url)[netloc]
+ if p_index_loc.endswith(':0'):
+ # Some platforms (Jython) don't find a port to which to bind,
+ # so skip this test for them.
+ return
+ with contexts.quiet():
+ # create an sdist that has a build-time dependency.
+ with TestSetupRequires.create_sdist() as dist_file:
+ with contexts.tempdir() as temp_install_dir:
+ with contexts.environment(PYTHONPATH=temp_install_dir):
+ ei_params = [
+ '--index-url', p_index.url,
+ '--allow-hosts', p_index_loc,
+ '--exclude-scripts',
+ '--install-dir', temp_install_dir,
+ dist_file,
+ ]
+ with sandbox.save_argv(['easy_install']):
+ # attempt to install the dist. It should fail because
+ # it doesn't exist.
+ with pytest.raises(SystemExit):
+ easy_install_pkg.main(ei_params)
+ # there should have been two or three requests to the server
+ # (three happens on Python 3.3a)
+ assert 2 <= len(p_index.requests) <= 3
+ assert p_index.requests[0].path == '/does-not-exist/'
+
+ @staticmethod
+ @contextlib.contextmanager
+ def create_sdist():
+ """
+ Return an sdist with a setup_requires dependency (of something that
+ doesn't exist)
+ """
+ with contexts.tempdir() as dir:
+ dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
+ make_sdist(dist_path, [
+ ('setup.py', DALS("""
+ import setuptools
+ setuptools.setup(
+ name="setuptools-test-fetcher",
+ version="1.0",
+ setup_requires = ['does-not-exist'],
+ )
+ """))])
+ yield dist_path
+
+ use_setup_cfg = (
+ (),
+ ('dependency_links',),
+ ('setup_requires',),
+ ('dependency_links', 'setup_requires'),
+ )
+
+ @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
+ def test_setup_requires_overrides_version_conflict(self, use_setup_cfg):
+ """
+ Regression test for distribution issue 323:
+ https://bitbucket.org/tarek/distribute/issues/323
+
+ Ensures that a distribution's setup_requires requirements can still be
+ installed and used locally even if a conflicting version of that
+ requirement is already on the path.
+ """
+
+ fake_dist = PRDistribution('does-not-matter', project_name='foobar',
+ version='0.0')
+ working_set.add(fake_dist)
+
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ test_pkg = create_setup_requires_package(temp_dir, use_setup_cfg=use_setup_cfg)
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ with contexts.quiet() as (stdout, stderr):
+ # Don't even need to install the package, just
+ # running the setup.py at all is sufficient
+ run_setup(test_setup_py, ['--name'])
+
+ lines = stdout.readlines()
+ assert len(lines) > 0
+ assert lines[-1].strip() == 'test_pkg'
+
+ @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
+ def test_setup_requires_override_nspkg(self, use_setup_cfg):
+ """
+ Like ``test_setup_requires_overrides_version_conflict`` but where the
+ ``setup_requires`` package is part of a namespace package that has
+ *already* been imported.
+ """
+
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
+ make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
+                # Now actually go ahead and extract to the temp dir and add the
+ # extracted path to sys.path so foo.bar v0.1 is importable
+ foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
+ os.mkdir(foobar_1_dir)
+ with tarfile.open(foobar_1_archive) as tf:
+ tf.extractall(foobar_1_dir)
+ sys.path.insert(1, foobar_1_dir)
+
+ dist = PRDistribution(foobar_1_dir, project_name='foo.bar',
+ version='0.1')
+ working_set.add(dist)
+
+ template = DALS("""\
+ import foo # Even with foo imported first the
+ # setup_requires package should override
+ import setuptools
+ setuptools.setup(**%r)
+
+ if not (hasattr(foo, '__path__') and
+ len(foo.__path__) == 2):
+ print('FAIL')
+
+ if 'foo.bar-0.2' not in foo.__path__[0]:
+ print('FAIL')
+ """)
+
+ test_pkg = create_setup_requires_package(
+ temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template,
+ use_setup_cfg=use_setup_cfg)
+
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+
+ with contexts.quiet() as (stdout, stderr):
+ try:
+ # Don't even need to install the package, just
+ # running the setup.py at all is sufficient
+ run_setup(test_setup_py, ['--name'])
+ except pkg_resources.VersionConflict:
+                        pytest.fail('Installing setup.py requirements '
+                                    'caused a VersionConflict')
+
+ assert 'FAIL' not in stdout.getvalue()
+ lines = stdout.readlines()
+ assert len(lines) > 0
+ assert lines[-1].strip() == 'test_pkg'
+
+ @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
+ def test_setup_requires_with_attr_version(self, use_setup_cfg):
+ def make_dependency_sdist(dist_path, distname, version):
+ make_sdist(dist_path, [
+ ('setup.py',
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name={name!r},
+ version={version!r},
+ py_modules=[{name!r}],
+ )
+ """.format(name=distname, version=version))),
+ (distname + '.py',
+ DALS("""
+ version = 42
+ """
+ ))])
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ test_pkg = create_setup_requires_package(
+ temp_dir, setup_attrs=dict(version='attr: foobar.version'),
+ make_package=make_dependency_sdist,
+                    use_setup_cfg=use_setup_cfg + ('version',),
+ )
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ with contexts.quiet() as (stdout, stderr):
+ run_setup(test_setup_py, ['--version'])
+ lines = stdout.readlines()
+ assert len(lines) > 0
+ assert lines[-1].strip() == '42'
+
+
+def make_trivial_sdist(dist_path, distname, version):
+ """
+ Create a simple sdist tarball at dist_path, containing just a simple
+ setup.py.
+ """
+
+ make_sdist(dist_path, [
+ ('setup.py',
+ DALS("""\
+ import setuptools
+ setuptools.setup(
+ name=%r,
+ version=%r
+ )
+ """ % (distname, version)))])
+
+
+def make_nspkg_sdist(dist_path, distname, version):
+ """
+ Make an sdist tarball with distname and version which also contains one
+ package with the same name as distname. The top-level package is
+    designated a namespace package.
+ """
+
+ parts = distname.split('.')
+ nspackage = parts[0]
+
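+    # 'foo.bar' -> ['foo', 'foo.bar']: every dotted prefix becomes a package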
+ packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
+
+ setup_py = DALS("""\
+ import setuptools
+ setuptools.setup(
+ name=%r,
+ version=%r,
+ packages=%r,
+ namespace_packages=[%r]
+ )
+ """ % (distname, version, packages, nspackage))
+
+ init = "__import__('pkg_resources').declare_namespace(__name__)"
+
+ files = [('setup.py', setup_py),
+ (os.path.join(nspackage, '__init__.py'), init)]
+ for package in packages[1:]:
+ filename = os.path.join(*(package.split('.') + ['__init__.py']))
+ files.append((filename, ''))
+
+ make_sdist(dist_path, files)
+
+
+def make_sdist(dist_path, files):
+ """
+ Create a simple sdist tarball at dist_path, containing the files
+ listed in ``files`` as ``(filename, content)`` tuples.
+ """
+
+ with tarfile.open(dist_path, 'w:gz') as dist:
+ for filename, content in files:
+ file_bytes = io.BytesIO(content.encode('utf-8'))
+ file_info = tarfile.TarInfo(name=filename)
+ file_info.size = len(file_bytes.getvalue())
+ file_info.mtime = int(time.time())
+ dist.addfile(file_info, fileobj=file_bytes)
+
+
+def create_setup_requires_package(path, distname='foobar', version='0.1',
+ make_package=make_trivial_sdist,
+ setup_py_template=None, setup_attrs={},
+ use_setup_cfg=()):
+ """Creates a source tree under path for a trivial test package that has a
+ single requirement in setup_requires--a tarball for that requirement is
+ also created and added to the dependency_links argument.
+
+ ``distname`` and ``version`` refer to the name/version of the package that
+ the test package requires via ``setup_requires``. The name of the test
+ package itself is just 'test_pkg'.
+ """
+
+ test_setup_attrs = {
+ 'name': 'test_pkg', 'version': '0.0',
+ 'setup_requires': ['%s==%s' % (distname, version)],
+ 'dependency_links': [os.path.abspath(path)]
+ }
+ test_setup_attrs.update(setup_attrs)
+
+ test_pkg = os.path.join(path, 'test_pkg')
+ os.mkdir(test_pkg)
+
+ if use_setup_cfg:
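+        # move the selected attributes out of the setup() kwargs and into setup.cfg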
+ test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+ options = []
+ metadata = []
+ for name in use_setup_cfg:
+ value = test_setup_attrs.pop(name)
+ if name in 'name version'.split():
+ section = metadata
+ else:
+ section = options
+ if isinstance(value, (tuple, list)):
+ value = ';'.join(value)
+ section.append('%s: %s' % (name, value))
+ with open(test_setup_cfg, 'w') as f:
+ f.write(DALS(
+ """
+ [metadata]
+ {metadata}
+ [options]
+ {options}
+ """
+ ).format(
+ options='\n'.join(options),
+ metadata='\n'.join(metadata),
+ ))
+
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+
+ if setup_py_template is None:
+ setup_py_template = DALS("""\
+ import setuptools
+ setuptools.setup(**%r)
+ """)
+
+ with open(test_setup_py, 'w') as f:
+ f.write(setup_py_template % test_setup_attrs)
+
+ foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
+ make_package(foobar_path, distname, version)
+
+ return test_pkg
+
+
+@pytest.mark.skipif(
+ sys.platform.startswith('java') and ei.is_sh(sys.executable),
+ reason="Test cannot run under java when executable is sh"
+)
+class TestScriptHeader:
+ non_ascii_exe = '/Users/José/bin/python'
+ exe_with_spaces = r'C:\Program Files\Python33\python.exe'
+
+ def test_get_script_header(self):
+ expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
+ actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python')
+ assert actual == expected
+
+ def test_get_script_header_args(self):
+        expected = '#!%s -x\n' % ei.nt_quote_arg(
+            os.path.normpath(sys.executable))
+ actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x')
+ assert actual == expected
+
+ def test_get_script_header_non_ascii_exe(self):
+ actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
+ executable=self.non_ascii_exe)
+ expected = '#!%s -x\n' % self.non_ascii_exe
+ assert actual == expected
+
+ def test_get_script_header_exe_with_spaces(self):
+ actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
+ executable='"' + self.exe_with_spaces + '"')
+ expected = '#!"%s"\n' % self.exe_with_spaces
+ assert actual == expected
+
+
+class TestCommandSpec:
+ def test_custom_launch_command(self):
+ """
+ Show how a custom CommandSpec could be used to specify a #! executable
+ which takes parameters.
+ """
+ cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
+ assert cmd.as_header() == '#!/usr/bin/env python3\n'
+
+ def test_from_param_for_CommandSpec_is_passthrough(self):
+ """
+ from_param should return an instance of a CommandSpec
+ """
+ cmd = ei.CommandSpec(['python'])
+ cmd_new = ei.CommandSpec.from_param(cmd)
+ assert cmd is cmd_new
+
+ @mock.patch('sys.executable', TestScriptHeader.exe_with_spaces)
+ @mock.patch.dict(os.environ)
+ def test_from_environment_with_spaces_in_executable(self):
+ os.environ.pop('__PYVENV_LAUNCHER__', None)
+ cmd = ei.CommandSpec.from_environment()
+ assert len(cmd) == 1
+ assert cmd.as_header().startswith('#!"')
+
+ def test_from_simple_string_uses_shlex(self):
+ """
+ In order to support `executable = /usr/bin/env my-python`, make sure
+ from_param invokes shlex on that input.
+ """
+ cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
+ assert len(cmd) == 2
+ assert '"' not in cmd.as_header()
+
+
+class TestWindowsScriptWriter:
+ def test_header(self):
+ hdr = ei.WindowsScriptWriter.get_script_header('')
+ assert hdr.startswith('#!')
+ assert hdr.endswith('\n')
+ hdr = hdr.lstrip('#!')
+ hdr = hdr.rstrip('\n')
+ # header should not start with an escaped quote
+ assert not hdr.startswith('\\"')
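+
+# Illustration: when sys.executable lives at a path containing spaces, the
+# header built above is expected to take the quoted form
+#
+#   #!"C:\Program Files\Python33\python.exe"
+#
+# so after stripping '#!' it must not begin with an escaped quote (\").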
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
new file mode 100644
index 0000000..2a070de
--- /dev/null
+++ b/setuptools/tests/test_egg_info.py
@@ -0,0 +1,598 @@
+import sys
+import ast
+import os
+import glob
+import re
+import stat
+
+from setuptools.command.egg_info import egg_info, manifest_maker
+from setuptools.dist import Distribution
+from setuptools.extern.six.moves import map
+
+import pytest
+
+from . import environment
+from .files import build_files
+from .textwrap import DALS
+from . import contexts
+
+
+class Environment(str):
+ pass
+
+
+class TestEggInfo(object):
+
+ setup_script = DALS("""
+ from setuptools import setup
+
+ setup(
+ name='foo',
+ py_modules=['hello'],
+ entry_points={'console_scripts': ['hi = hello.run']},
+ zip_safe=False,
+ )
+ """)
+
+ def _create_project(self):
+ build_files({
+ 'setup.py': self.setup_script,
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """)
+ })
+
+ @pytest.yield_fixture
+ def env(self):
+ with contexts.tempdir(prefix='setuptools-test.') as env_dir:
+ env = Environment(env_dir)
+ os.chmod(env_dir, stat.S_IRWXU)
+ subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
+ env.paths = dict(
+ (dirname, os.path.join(env_dir, dirname))
+ for dirname in subs
+ )
+ list(map(os.mkdir, env.paths.values()))
+ build_files({
+ env.paths['home']: {
+ '.pydistutils.cfg': DALS("""
+ [egg_info]
+ egg-base = %(egg-base)s
+ """ % env.paths)
+ }
+ })
+ yield env
+
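+ # Illustration (hypothetical tmp path): for env_dir = /tmp/setuptools-test.XYZ,
+ # env.paths maps each of home/lib/scripts/data/egg-base to a fresh
+ # subdirectory, and the fixture writes home/.pydistutils.cfg containing
+ #
+ #   [egg_info]
+ #   egg-base = /tmp/setuptools-test.XYZ/egg-base
+ #
+ # so egg_info output from these tests is redirected into the temp tree.
+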
+ def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env):
+ """
+ When the egg_info section is empty or not present, running
+ save_version_info should add the settings to the setup.cfg
+ in a deterministic order, consistent with the ordering found
+ on Python 2.7 with PYTHONHASHSEED=0.
+ """
+ setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
+ dist = Distribution()
+ ei = egg_info(dist)
+ ei.initialize_options()
+ ei.save_version_info(setup_cfg)
+
+ with open(setup_cfg, 'r') as f:
+ content = f.read()
+
+ assert '[egg_info]' in content
+ assert 'tag_build =' in content
+ assert 'tag_date = 0' in content
+
+ expected_order = 'tag_build', 'tag_date',
+
+ self._validate_content_order(content, expected_order)
+
+ @staticmethod
+ def _validate_content_order(content, expected):
+ """
+ Assert that the strings in expected appear in content
+ in order.
+ """
+ pattern = '.*'.join(expected)
+ flags = re.MULTILINE | re.DOTALL
+ assert re.search(pattern, content, flags)
+
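+ # For example, expected = ('tag_build', 'tag_date') yields the pattern
+ # 'tag_build.*tag_date'; with re.DOTALL the '.*' spans newlines, so this
+ # simply asserts the options appear in that relative order.
+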
+ def test_egg_info_save_version_info_setup_defaults(self, tmpdir_cwd, env):
+ """
+ When running save_version_info on an existing setup.cfg
+ with the 'default' values present from a previous run,
+ the file should remain unchanged.
+ """
+ setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
+ build_files({
+ setup_cfg: DALS("""
+ [egg_info]
+ tag_build =
+ tag_date = 0
+ """),
+ })
+ dist = Distribution()
+ ei = egg_info(dist)
+ ei.initialize_options()
+ ei.save_version_info(setup_cfg)
+
+ with open(setup_cfg, 'r') as f:
+ content = f.read()
+
+ assert '[egg_info]' in content
+ assert 'tag_build =' in content
+ assert 'tag_date = 0' in content
+
+ expected_order = 'tag_build', 'tag_date',
+
+ self._validate_content_order(content, expected_order)
+
+ def test_egg_base_installed_egg_info(self, tmpdir_cwd, env):
+ self._create_project()
+
+ self._run_install_command(tmpdir_cwd, env)
+ actual = self._find_egg_info_files(env.paths['lib'])
+
+ expected = [
+ 'PKG-INFO',
+ 'SOURCES.txt',
+ 'dependency_links.txt',
+ 'entry_points.txt',
+ 'not-zip-safe',
+ 'top_level.txt',
+ ]
+ assert sorted(actual) == expected
+
+ def test_manifest_template_is_read(self, tmpdir_cwd, env):
+ self._create_project()
+ build_files({
+ 'MANIFEST.in': DALS("""
+ recursive-include docs *.rst
+ """),
+ 'docs': {
+ 'usage.rst': "Run 'hi'",
+ }
+ })
+ self._run_install_command(tmpdir_cwd, env)
+ egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
+ sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt')
+ with open(sources_txt) as f:
+ assert 'docs/usage.rst' in f.read().split('\n')
+
+ def _setup_script_with_requires(self, requires, use_setup_cfg=False):
+ setup_script = DALS(
+ '''
+ from setuptools import setup
+
+ setup(name='foo', zip_safe=False, %s)
+ '''
+ ) % ('' if use_setup_cfg else requires)
+ setup_config = requires if use_setup_cfg else ''
+ build_files({'setup.py': setup_script,
+ 'setup.cfg': setup_config})
+
+ mismatch_marker = "python_version<'{this_ver}'".format(
+ this_ver=sys.version_info[0],
+ )
+ # Alternate equivalent syntax.
+ mismatch_marker_alternate = 'python_version < "{this_ver}"'.format(
+ this_ver=sys.version_info[0],
+ )
+ invalid_marker = "<=>++"
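+ # Both mismatch markers compare python_version against the bare major
+ # version (e.g. "python_version<'3'" under Python 3), so they never match
+ # the running interpreter; requirements guarded by them belong in a
+ # conditional section of requires.txt and must never be installed.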
+
+ class RequiresTestHelper(object):
+
+ @staticmethod
+ def parametrize(*test_list, **format_dict):
+ idlist = []
+ argvalues = []
+ for test in test_list:
+ test_params = test.lstrip().split('\n\n', 3)
+ name_kwargs = test_params.pop(0).split('\n')
+ if len(name_kwargs) > 1:
+ val = name_kwargs[1].strip()
+ install_cmd_kwargs = ast.literal_eval(val)
+ else:
+ install_cmd_kwargs = {}
+ name = name_kwargs[0].strip()
+ setup_py_requires, setup_cfg_requires, expected_requires = (
+ DALS(a).format(**format_dict) for a in test_params
+ )
+ for id_, requires, use_cfg in (
+ (name, setup_py_requires, False),
+ (name + '_in_setup_cfg', setup_cfg_requires, True),
+ ):
+ idlist.append(id_)
+ marks = ()
+ if requires.startswith('@xfail\n'):
+ requires = requires[7:]
+ marks = pytest.mark.xfail
+ argvalues.append(pytest.param(requires, use_cfg,
+ expected_requires,
+ install_cmd_kwargs,
+ marks=marks))
+ return pytest.mark.parametrize(
+ 'requires,use_setup_cfg,'
+ 'expected_requires,install_cmd_kwargs',
+ argvalues, ids=idlist,
+ )
+
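+ # Worked example: for the 'install_requires_with_extra' case below,
+ # test.lstrip().split('\n\n', 3) yields four chunks:
+ #   1. "install_requires_with_extra\n{'cmd': ['egg_info']}" (id + kwargs)
+ #   2. the requires block for setup.py
+ #   3. the requires block for setup.cfg
+ #   4. the expected contents of requires.txt
+ # and each case is emitted twice, once per configuration style (the second
+ # id gains an '_in_setup_cfg' suffix).
+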
+ @RequiresTestHelper.parametrize(
+ # Format of a test:
+ #
+ # id
+ # install_cmd_kwargs [optional]
+ #
+ # requires block (when used in setup.py)
+ #
+ # requires block (when used in setup.cfg)
+ #
+ # expected contents of requires.txt
+
+ '''
+ install_requires_deterministic
+
+ install_requires=["fake-factory==0.5.2", "pytz"]
+
+ [options]
+ install_requires =
+ fake-factory==0.5.2
+ pytz
+
+ fake-factory==0.5.2
+ pytz
+ ''',
+
+ '''
+ install_requires_ordered
+
+ install_requires=["fake-factory>=1.12.3,!=2.0"]
+
+ [options]
+ install_requires =
+ fake-factory>=1.12.3,!=2.0
+
+ fake-factory!=2.0,>=1.12.3
+ ''',
+
+ '''
+ install_requires_with_marker
+
+ install_requires=["barbazquux;{mismatch_marker}"],
+
+ [options]
+ install_requires =
+ barbazquux; {mismatch_marker}
+
+ [:{mismatch_marker_alternate}]
+ barbazquux
+ ''',
+
+ '''
+ install_requires_with_extra
+ {'cmd': ['egg_info']}
+
+ install_requires=["barbazquux [test]"],
+
+ [options]
+ install_requires =
+ barbazquux [test]
+
+ barbazquux[test]
+ ''',
+
+ '''
+ install_requires_with_extra_and_marker
+
+ install_requires=["barbazquux [test]; {mismatch_marker}"],
+
+ [options]
+ install_requires =
+ barbazquux [test]; {mismatch_marker}
+
+ [:{mismatch_marker_alternate}]
+ barbazquux[test]
+ ''',
+
+ '''
+ setup_requires_with_markers
+
+ setup_requires=["barbazquux;{mismatch_marker}"],
+
+ [options]
+ setup_requires =
+ barbazquux; {mismatch_marker}
+
+ ''',
+
+ '''
+ tests_require_with_markers
+ {'cmd': ['test'], 'output': "Ran 0 tests in"}
+
+ tests_require=["barbazquux;{mismatch_marker}"],
+
+ [options]
+ tests_require =
+ barbazquux; {mismatch_marker}
+
+ ''',
+
+ '''
+ extras_require_with_extra
+ {'cmd': ['egg_info']}
+
+ extras_require={{"extra": ["barbazquux [test]"]}},
+
+ [options.extras_require]
+ extra = barbazquux [test]
+
+ [extra]
+ barbazquux[test]
+ ''',
+
+ '''
+ extras_require_with_extra_and_marker_in_req
+
+ extras_require={{"extra": ["barbazquux [test]; {mismatch_marker}"]}},
+
+ [options.extras_require]
+ extra =
+ barbazquux [test]; {mismatch_marker}
+
+ [extra]
+
+ [extra:{mismatch_marker_alternate}]
+ barbazquux[test]
+ ''',
+
+ # FIXME: ConfigParser does not allow : in key names!
+ '''
+ extras_require_with_marker
+
+ extras_require={{":{mismatch_marker}": ["barbazquux"]}},
+
+ @xfail
+ [options.extras_require]
+ :{mismatch_marker} = barbazquux
+
+ [:{mismatch_marker}]
+ barbazquux
+ ''',
+
+ '''
+ extras_require_with_marker_in_req
+
+ extras_require={{"extra": ["barbazquux; {mismatch_marker}"]}},
+
+ [options.extras_require]
+ extra =
+ barbazquux; {mismatch_marker}
+
+ [extra]
+
+ [extra:{mismatch_marker_alternate}]
+ barbazquux
+ ''',
+
+ '''
+ extras_require_with_empty_section
+
+ extras_require={{"empty": []}},
+
+ [options.extras_require]
+ empty =
+
+ [empty]
+ ''',
+ # Format arguments.
+ invalid_marker=invalid_marker,
+ mismatch_marker=mismatch_marker,
+ mismatch_marker_alternate=mismatch_marker_alternate,
+ )
+ def test_requires(
+ self, tmpdir_cwd, env, requires, use_setup_cfg,
+ expected_requires, install_cmd_kwargs):
+ self._setup_script_with_requires(requires, use_setup_cfg)
+ self._run_install_command(tmpdir_cwd, env, **install_cmd_kwargs)
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ requires_txt = os.path.join(egg_info_dir, 'requires.txt')
+ if os.path.exists(requires_txt):
+ with open(requires_txt) as fp:
+ install_requires = fp.read()
+ else:
+ install_requires = ''
+ assert install_requires.lstrip() == expected_requires
+ assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
+
+ def test_install_requires_unordered_disallowed(self, tmpdir_cwd, env):
+ """
+ Packages that pass unordered install_requires sequences
+ should be rejected as they produce non-deterministic
+ builds. See #458.
+ """
+ req = 'install_requires={"fake-factory==0.5.2", "pytz"}'
+ self._setup_script_with_requires(req)
+ with pytest.raises(AssertionError):
+ self._run_install_command(tmpdir_cwd, env)
+
+ def test_extras_require_with_invalid_marker(self, tmpdir_cwd, env):
+ tmpl = 'extras_require={{":{marker}": ["barbazquux"]}},'
+ req = tmpl.format(marker=self.invalid_marker)
+ self._setup_script_with_requires(req)
+ with pytest.raises(AssertionError):
+ self._run_install_command(tmpdir_cwd, env)
+ assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
+
+ def test_extras_require_with_invalid_marker_in_req(self, tmpdir_cwd, env):
+ tmpl = 'extras_require={{"extra": ["barbazquux; {marker}"]}},'
+ req = tmpl.format(marker=self.invalid_marker)
+ self._setup_script_with_requires(req)
+ with pytest.raises(AssertionError):
+ self._run_install_command(tmpdir_cwd, env)
+ assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
+
+ def test_provides_extra(self, tmpdir_cwd, env):
+ self._setup_script_with_requires(
+ 'extras_require={"foobar": ["barbazquux"]},')
+ environ = dict(os.environ, HOME=env.paths['home'])
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ assert 'Provides-Extra: foobar' in pkg_info_lines
+ assert 'Metadata-Version: 2.1' in pkg_info_lines
+
+ def test_doesnt_provides_extra(self, tmpdir_cwd, env):
+ self._setup_script_with_requires(
+ '''install_requires=["spam ; python_version<'3.3'"]''')
+ environ = dict(os.environ, HOME=env.paths['home'])
+ environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_text = pkginfo_file.read()
+ assert 'Provides-Extra:' not in pkg_info_text
+
+ def test_long_description_content_type(self, tmpdir_cwd, env):
+ # Test that specifying a `long_description_content_type` keyword arg to
+ # the `setup` function results in writing a `Description-Content-Type`
+ # line to the `PKG-INFO` file in the `<distribution>.egg-info`
+ # directory.
+ # `Description-Content-Type` is described at
+ # https://github.com/pypa/python-packaging-user-guide/pull/258
+
+ self._setup_script_with_requires(
+ """long_description_content_type='text/markdown',""")
+ environ = dict(os.environ, HOME=env.paths['home'])
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ expected_line = 'Description-Content-Type: text/markdown'
+ assert expected_line in pkg_info_lines
+ assert 'Metadata-Version: 2.1' in pkg_info_lines
+
+ def test_project_urls(self, tmpdir_cwd, env):
+ # Test that specifying a `project_urls` dict to the `setup`
+ # function results in writing multiple `Project-URL` lines to
+ # the `PKG-INFO` file in the `<distribution>.egg-info`
+ # directory.
+ # `Project-URL` is described at https://packaging.python.org
+ # /specifications/core-metadata/#project-url-multiple-use
+
+ self._setup_script_with_requires(
+ """project_urls={
+ 'Link One': 'https://example.com/one/',
+ 'Link Two': 'https://example.com/two/',
+ },""")
+ environ = dict(os.environ, HOME=env.paths['home'])
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ expected_line = 'Project-URL: Link One, https://example.com/one/'
+ assert expected_line in pkg_info_lines
+ expected_line = 'Project-URL: Link Two, https://example.com/two/'
+ assert expected_line in pkg_info_lines
+
+ def test_python_requires_egg_info(self, tmpdir_cwd, env):
+ self._setup_script_with_requires(
+ """python_requires='>=2.7.12',""")
+ environ = dict(os.environ, HOME=env.paths['home'])
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ assert 'Requires-Python: >=2.7.12' in pkg_info_lines
+ assert 'Metadata-Version: 1.2' in pkg_info_lines
+
+ def test_python_requires_install(self, tmpdir_cwd, env):
+ self._setup_script_with_requires(
+ """python_requires='>=1.2.3',""")
+ self._run_install_command(tmpdir_cwd, env)
+ egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
+ pkginfo = os.path.join(egg_info_dir, 'PKG-INFO')
+ with open(pkginfo) as f:
+ assert 'Requires-Python: >=1.2.3' in f.read().split('\n')
+
+ def test_manifest_maker_warning_suppression(self):
+ fixtures = [
+ "standard file not found: should have one of foo.py, bar.py",
+ "standard file 'setup.py' not found"
+ ]
+
+ for msg in fixtures:
+ assert manifest_maker._should_suppress_warning(msg)
+
+ def _run_install_command(self, tmpdir_cwd, env, cmd=None, output=None):
+ environ = dict(os.environ, HOME=env.paths['home'])
+ if cmd is None:
+ cmd = [
+ 'install',
+ '--home', env.paths['home'],
+ '--install-lib', env.paths['lib'],
+ '--install-scripts', env.paths['scripts'],
+ '--install-data', env.paths['data'],
+ ]
+ code, data = environment.run_setup_py(
+ cmd=cmd,
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ env=environ,
+ )
+ if code:
+ raise AssertionError(data)
+ if output:
+ assert output in data
+
+ def _find_egg_info_files(self, root):
+ class DirList(list):
+ def __init__(self, files, base):
+ super(DirList, self).__init__(files)
+ self.base = base
+
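+ # DirList behaves like the plain list of filenames but also remembers the
+ # directory they came from, so callers can use sorted(result) as well as
+ # os.path.join(result.base, 'PKG-INFO').
+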
+ results = (
+ DirList(filenames, dirpath)
+ for dirpath, dirnames, filenames in os.walk(root)
+ if os.path.basename(dirpath) == 'EGG-INFO'
+ )
+ # expect exactly one result
+ result, = results
+ return result
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
new file mode 100644
index 0000000..a6023de
--- /dev/null
+++ b/setuptools/tests/test_find_packages.py
@@ -0,0 +1,182 @@
+"""Tests for setuptools.find_packages()."""
+import os
+import sys
+import shutil
+import tempfile
+import platform
+
+import pytest
+
+import setuptools
+from setuptools import find_packages
+
+find_420_packages = setuptools.PEP420PackageFinder.find
+
+# modeled after CPython's test.support.can_symlink
+
+
+def can_symlink():
+ TESTFN = tempfile.mktemp()
+ symlink_path = TESTFN + "can_symlink"
+ try:
+ os.symlink(TESTFN, symlink_path)
+ can = True
+ except (OSError, NotImplementedError, AttributeError):
+ can = False
+ else:
+ os.remove(symlink_path)
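+ # Cache the result: rebind the module-level name so later calls skip the
+ # filesystem probe (the same memoization trick as CPython's test.support).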
+ globals().update(can_symlink=lambda: can)
+ return can
+
+
+def has_symlink():
+ bad_symlink = (
+ # Windows symlink directory detection is broken on Python 3.2
+ platform.system() == 'Windows' and sys.version_info[:2] == (3, 2)
+ )
+ return can_symlink() and not bad_symlink
+
+
+class TestFindPackages:
+ def setup_method(self, method):
+ self.dist_dir = tempfile.mkdtemp()
+ self._make_pkg_structure()
+
+ def teardown_method(self, method):
+ shutil.rmtree(self.dist_dir)
+
+ def _make_pkg_structure(self):
+ """Make basic package structure.
+
+ dist/
+ docs/
+ conf.py
+ pkg/
+ __pycache__/
+ nspkg/
+ mod.py
+ subpkg/
+ assets/
+ asset
+ __init__.py
+ setup.py
+
+ """
+ self.docs_dir = self._mkdir('docs', self.dist_dir)
+ self._touch('conf.py', self.docs_dir)
+ self.pkg_dir = self._mkdir('pkg', self.dist_dir)
+ self._mkdir('__pycache__', self.pkg_dir)
+ self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
+ self._touch('mod.py', self.ns_pkg_dir)
+ self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
+ self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
+ self._touch('asset', self.asset_dir)
+ self._touch('__init__.py', self.sub_pkg_dir)
+ self._touch('setup.py', self.dist_dir)
+
+ def _mkdir(self, path, parent_dir=None):
+ if parent_dir:
+ path = os.path.join(parent_dir, path)
+ os.mkdir(path)
+ return path
+
+ def _touch(self, path, dir_=None):
+ if dir_:
+ path = os.path.join(dir_, path)
+ fp = open(path, 'w')
+ fp.close()
+ return path
+
+ def test_regular_package(self):
+ self._touch('__init__.py', self.pkg_dir)
+ packages = find_packages(self.dist_dir)
+ assert packages == ['pkg', 'pkg.subpkg']
+
+ def test_exclude(self):
+ self._touch('__init__.py', self.pkg_dir)
+ packages = find_packages(self.dist_dir, exclude=('pkg.*',))
+ assert packages == ['pkg']
+
+ def test_exclude_recursive(self):
+ """
+ Excluding a parent package should not also exclude its child packages.
+ """
+ self._touch('__init__.py', self.pkg_dir)
+ self._touch('__init__.py', self.sub_pkg_dir)
+ packages = find_packages(self.dist_dir, exclude=('pkg',))
+ assert packages == ['pkg.subpkg']
+
+ def test_include_excludes_other(self):
+ """
+ If include is specified, other packages should be excluded.
+ """
+ self._touch('__init__.py', self.pkg_dir)
+ alt_dir = self._mkdir('other_pkg', self.dist_dir)
+ self._touch('__init__.py', alt_dir)
+ packages = find_packages(self.dist_dir, include=['other_pkg'])
+ assert packages == ['other_pkg']
+
+ def test_dir_with_dot_is_skipped(self):
+ shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
+ data_dir = self._mkdir('some.data', self.pkg_dir)
+ self._touch('__init__.py', data_dir)
+ self._touch('file.dat', data_dir)
+ packages = find_packages(self.dist_dir)
+ assert 'pkg.some.data' not in packages
+
+ def test_dir_with_packages_in_subdir_is_excluded(self):
+ """
+ Ensure that a package nested inside a non-package directory, such as
+ build/pkg/__init__.py, is excluded.
+ """
+ build_dir = self._mkdir('build', self.dist_dir)
+ build_pkg_dir = self._mkdir('pkg', build_dir)
+ self._touch('__init__.py', build_pkg_dir)
+ packages = find_packages(self.dist_dir)
+ assert 'build.pkg' not in packages
+
+ @pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
+ def test_symlinked_packages_are_included(self):
+ """
+ A symbolically-linked directory should be treated like any other
+ directory when matched as a package.
+
+ Create a link from lpkg -> pkg.
+ """
+ self._touch('__init__.py', self.pkg_dir)
+ linked_pkg = os.path.join(self.dist_dir, 'lpkg')
+ os.symlink('pkg', linked_pkg)
+ assert os.path.isdir(linked_pkg)
+ packages = find_packages(self.dist_dir)
+ assert 'lpkg' in packages
+
+ def _assert_packages(self, actual, expected):
+ assert set(actual) == set(expected)
+
+ def test_pep420_ns_package(self):
+ packages = find_420_packages(
+ self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
+ self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+ def test_pep420_ns_package_no_includes(self):
+ packages = find_420_packages(
+ self.dist_dir, exclude=['pkg.subpkg.assets'])
+ self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+ def test_pep420_ns_package_no_includes_or_excludes(self):
+ packages = find_420_packages(self.dist_dir)
+ expected = [
+ 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
+ self._assert_packages(packages, expected)
+
+ def test_regular_package_with_nested_pep420_ns_packages(self):
+ self._touch('__init__.py', self.pkg_dir)
+ packages = find_420_packages(
+ self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
+ self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+ def test_pep420_ns_package_no_non_package_dirs(self):
+ shutil.rmtree(self.docs_dir)
+ shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
+ packages = find_420_packages(self.dist_dir)
+ self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py
new file mode 100644
index 0000000..7393241
--- /dev/null
+++ b/setuptools/tests/test_install_scripts.py
@@ -0,0 +1,88 @@
+"""install_scripts tests
+"""
+
+import io
+import sys
+
+import pytest
+
+from setuptools.command.install_scripts import install_scripts
+from setuptools.dist import Distribution
+from . import contexts
+
+
+class TestInstallScripts:
+ settings = dict(
+ name='foo',
+ entry_points={'console_scripts': ['foo=foo:foo']},
+ version='0.0',
+ )
+ unix_exe = '/usr/dummy-test-path/local/bin/python'
+ unix_spaces_exe = '/usr/bin/env dummy-test-python'
+ win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.3\\python.exe'
+
+ def _run_install_scripts(self, install_dir, executable=None):
+ dist = Distribution(self.settings)
+ dist.script_name = 'setup.py'
+ cmd = install_scripts(dist)
+ cmd.install_dir = install_dir
+ if executable is not None:
+ bs = cmd.get_finalized_command('build_scripts')
+ bs.executable = executable
+ cmd.ensure_finalized()
+ with contexts.quiet():
+ cmd.run()
+
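+ # Note: on Unix the console script is installed as a plain 'foo' file,
+ # while on Windows the writer emits 'foo-script.py' (whose first line is
+ # the shebang inspected below) alongside a 'foo.exe' launcher.
+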
+ @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
+ def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch):
+ """
+ Ensure that shebang is not quoted on Unix when getting the Python exe
+ from sys.executable.
+ """
+ expected = '#!%s\n' % self.unix_exe
+ monkeypatch.setattr('sys.executable', self.unix_exe)
+ with tmpdir.as_cwd():
+ self._run_install_scripts(str(tmpdir))
+ with io.open(str(tmpdir.join('foo')), 'r') as f:
+ actual = f.readline()
+ assert actual == expected
+
+ @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
+ def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch):
+ """
+ Ensure that shebang is quoted on Windows when getting the Python exe
+ from sys.executable and it contains a space.
+ """
+ expected = '#!"%s"\n' % self.win32_exe
+ monkeypatch.setattr('sys.executable', self.win32_exe)
+ with tmpdir.as_cwd():
+ self._run_install_scripts(str(tmpdir))
+ with io.open(str(tmpdir.join('foo-script.py')), 'r') as f:
+ actual = f.readline()
+ assert actual == expected
+
+ @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
+ def test_executable_with_spaces_escaping_unix(self, tmpdir):
+ """
+ Ensure that shebang on Unix is not quoted, even when a value with spaces
+ is specified using --executable.
+ """
+ expected = '#!%s\n' % self.unix_spaces_exe
+ with tmpdir.as_cwd():
+ self._run_install_scripts(str(tmpdir), self.unix_spaces_exe)
+ with io.open(str(tmpdir.join('foo')), 'r') as f:
+ actual = f.readline()
+ assert actual == expected
+
+ @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
+ def test_executable_arg_escaping_win32(self, tmpdir):
+ """
+ Ensure that the shebang on Windows is quoted when the path with spaces
+ supplied via --executable is itself properly quoted.
+ """
+ expected = '#!"%s"\n' % self.win32_exe
+ with tmpdir.as_cwd():
+ self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"')
+ with io.open(str(tmpdir.join('foo-script.py')), 'r') as f:
+ actual = f.readline()
+ assert actual == expected
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
new file mode 100644
index 0000000..3a9a6c5
--- /dev/null
+++ b/setuptools/tests/test_integration.py
@@ -0,0 +1,165 @@
+"""Run some integration tests.
+
+Try to install a few packages.
+"""
+
+import glob
+import os
+import sys
+
+from setuptools.extern.six.moves import urllib
+import pytest
+
+from setuptools.command.easy_install import easy_install
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+
+def setup_module(module):
+ packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
+ for pkg in packages:
+ try:
+ __import__(pkg)
+ tmpl = "Integration tests cannot run when {pkg} is installed"
+ pytest.skip(tmpl.format(**locals()))
+ except ImportError:
+ pass
+
+ try:
+ urllib.request.urlopen('https://pypi.python.org/pypi')
+ except Exception as exc:
+ pytest.skip(str(exc))
+
+
+@pytest.fixture
+def install_context(request, tmpdir, monkeypatch):
+ """Fixture to set up temporary installation directory.
+ """
+ # Create isolated directories for cwd, user base/site, and install target.
+ new_cwd = tmpdir.mkdir('cwd')
+ user_base = tmpdir.mkdir('user_base')
+ user_site = tmpdir.mkdir('user_site')
+ install_dir = tmpdir.mkdir('install_dir')
+
+ def fin():
+ # undo the monkeypatch, particularly needed under
+ # windows because of kept handle on cwd
+ monkeypatch.undo()
+ new_cwd.remove()
+ user_base.remove()
+ user_site.remove()
+ install_dir.remove()
+
+ request.addfinalizer(fin)
+
+ # Change the environment and site settings to control where the
+ # files are installed and ensure we do not overwrite anything.
+ monkeypatch.chdir(new_cwd)
+ monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
+ monkeypatch.setattr('site.USER_BASE', user_base.strpath)
+ monkeypatch.setattr('site.USER_SITE', user_site.strpath)
+ monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
+ monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
+
+ # Set up the command for performing the installation.
+ dist = Distribution()
+ cmd = easy_install(dist)
+ cmd.install_dir = install_dir.strpath
+ return cmd
+
+
+def _install_one(requirement, cmd, pkgname, modulename):
+ cmd.args = [requirement]
+ cmd.ensure_finalized()
+ cmd.run()
+ target = cmd.install_dir
+ dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
+ assert dest_path
+ assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
+
+
+def test_stevedore(install_context):
+ _install_one('stevedore', install_context,
+ 'stevedore', 'extension.py')
+
+
+@pytest.mark.xfail
+def test_virtualenvwrapper(install_context):
+ _install_one('virtualenvwrapper', install_context,
+ 'virtualenvwrapper', 'hook_loader.py')
+
+
+def test_pbr(install_context):
+ _install_one('pbr', install_context,
+ 'pbr', 'core.py')
+
+
+@pytest.mark.xfail
+def test_python_novaclient(install_context):
+ _install_one('python-novaclient', install_context,
+ 'novaclient', 'base.py')
+
+
+def test_pyuri(install_context):
+ """
+ Install the pyuri package (version 0.3.1 at the time of writing).
+
+ This is also a regression test for issue #1016.
+ """
+ _install_one('pyuri', install_context, 'pyuri', 'uri.py')
+
+ pyuri = install_context.installed_projects['pyuri']
+
+ # The package data should be installed.
+ assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex'))
+
+
+import re
+import subprocess
+import functools
+import tarfile
+import zipfile
+
+
+build_deps = ['appdirs', 'packaging', 'pyparsing', 'six']
+
+
+@pytest.mark.parametrize("build_dep", build_deps)
+@pytest.mark.skipif(sys.version_info < (3, 6), reason='needs Python 3.6+ (subprocess encoding support)')
+def test_build_deps_on_distutils(request, tmpdir_factory, build_dep):
+ """
+ All setuptools build dependencies must build without
+ setuptools.
+ """
+ if 'pyparsing' in build_dep:
+ pytest.xfail(reason="Project imports setuptools unconditionally")
+ build_target = tmpdir_factory.mktemp('source')
+ build_dir = download_and_extract(request, build_dep, build_target)
+ install_target = tmpdir_factory.mktemp('target')
+ output = install(build_dir, install_target)
+ for line in output.splitlines():
+ match = re.search('Unknown distribution option: (.*)', line)
+ allowed_unknowns = [
+ 'test_suite',
+ 'tests_require',
+ 'install_requires',
+ ]
+ assert not match or match.group(1).strip('"\'') in allowed_unknowns
+
+
+def install(pkg_dir, install_dir):
+ with open(os.path.join(pkg_dir, 'setuptools.py'), 'w') as breaker:
+ breaker.write('raise ImportError()')
+ cmd = [sys.executable, 'setup.py', 'install', '--prefix', install_dir]
+ env = dict(os.environ, PYTHONPATH=pkg_dir)
+ output = subprocess.check_output(cmd, cwd=pkg_dir, env=env, stderr=subprocess.STDOUT)
+ return output.decode('utf-8')
+
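+# Note: the fake setuptools.py written above shadows the real setuptools on
+# PYTHONPATH, so any 'import setuptools' inside the package's setup.py raises
+# ImportError and the build must succeed with plain distutils alone.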
+
+def download_and_extract(request, req, target):
+ cmd = [sys.executable, '-m', 'pip', 'download', '--no-deps',
+ '--no-binary', ':all:', req]
+ output = subprocess.check_output(cmd, encoding='utf-8')
+ filename = re.search('Saved (.*)', output).group(1)
+ request.addfinalizer(functools.partial(os.remove, filename))
+ opener = zipfile.ZipFile if filename.endswith('.zip') else tarfile.open
+ with opener(filename) as archive:
+ archive.extractall(target)
+ return os.path.join(target, os.listdir(target)[0])
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
new file mode 100644
index 0000000..65eec7d
--- /dev/null
+++ b/setuptools/tests/test_manifest.py
@@ -0,0 +1,602 @@
+# -*- coding: utf-8 -*-
+"""sdist tests"""
+
+import contextlib
+import os
+import shutil
+import sys
+import tempfile
+import itertools
+from distutils import log
+from distutils.errors import DistutilsTemplateError
+
+import pkg_resources.py31compat
+from setuptools.command.egg_info import FileList, egg_info, translate_pattern
+from setuptools.dist import Distribution
+from setuptools.extern import six
+from setuptools.tests.textwrap import DALS
+
+import pytest
+
+py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
+
+
+def make_local_path(s):
+ """Converts '/' in a string to os.sep"""
+ return s.replace('/', os.sep)
+
+
+SETUP_ATTRS = {
+ 'name': 'app',
+ 'version': '0.0',
+ 'packages': ['app'],
+}
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(**%r)
+""" % SETUP_ATTRS
+
+
+@contextlib.contextmanager
+def quiet():
+ old_stdout, old_stderr = sys.stdout, sys.stderr
+ sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
+ try:
+ yield
+ finally:
+ sys.stdout, sys.stderr = old_stdout, old_stderr
+
+
+def touch(filename):
+ open(filename, 'w').close()
+
+
+# The set of files always in the manifest, including all files in the
+# .egg-info directory
+default_files = frozenset(map(make_local_path, [
+ 'README.rst',
+ 'MANIFEST.in',
+ 'setup.py',
+ 'app.egg-info/PKG-INFO',
+ 'app.egg-info/SOURCES.txt',
+ 'app.egg-info/dependency_links.txt',
+ 'app.egg-info/top_level.txt',
+ 'app/__init__.py',
+]))
+
+
+translate_specs = [
+ ('foo', ['foo'], ['bar', 'foobar']),
+ ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']),
+
+ # Glob matching
+ ('*.txt', ['foo.txt', 'bar.txt'], ['foo/foo.txt']),
+ ('dir/*.txt', ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
+ ('*/*.py', ['bin/start.py'], []),
+ ('docs/page-?.txt', ['docs/page-9.txt'], ['docs/page-10.txt']),
+
+ # Globstars change what they mean depending upon where they are
+ (
+ 'foo/**/bar',
+ ['foo/bing/bar', 'foo/bing/bang/bar', 'foo/bar'],
+ ['foo/abar'],
+ ),
+ (
+ 'foo/**',
+ ['foo/bar/bing.py', 'foo/x'],
+ ['/foo/x'],
+ ),
+ (
+ '**',
+ ['x', 'abc/xyz', '@nything'],
+ [],
+ ),
+
+ # Character classes
+ (
+ 'pre[one]post',
+ ['preopost', 'prenpost', 'preepost'],
+ ['prepost', 'preonepost'],
+ ),
+
+ (
+ 'hello[!one]world',
+ ['helloxworld', 'helloyworld'],
+ ['hellooworld', 'helloworld', 'hellooneworld'],
+ ),
+
+ (
+ '[]one].txt',
+ ['o.txt', '].txt', 'e.txt'],
+ ['one].txt'],
+ ),
+
+ (
+ 'foo[!]one]bar',
+ ['fooybar'],
+ ['foo]bar', 'fooobar', 'fooebar'],
+ ),
+
+]
+"""
+A spec of inputs for 'translate_pattern': each entry pairs a pattern with
+paths it should match and paths it should not.
+"""
+
+match_params = itertools.chain.from_iterable(
+ zip(itertools.repeat(pattern), matches)
+ for pattern, matches, mismatches in translate_specs
+)
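+# match_params flattens the specs into (pattern, matching_path) pairs, e.g.
+# ('foo', 'foo'), ('foo/bar', 'foo/bar'), ('*.txt', 'foo.txt'), ...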
+
+
+@pytest.fixture(params=match_params)
+def pattern_match(request):
+ return map(make_local_path, request.param)
+
+
+mismatch_params = itertools.chain.from_iterable(
+ zip(itertools.repeat(pattern), mismatches)
+ for pattern, matches, mismatches in translate_specs
+)
+
+
+@pytest.fixture(params=mismatch_params)
+def pattern_mismatch(request):
+ return map(make_local_path, request.param)
+
+
+def test_translated_pattern_match(pattern_match):
+ pattern, target = pattern_match
+ assert translate_pattern(pattern).match(target)
+
+
+def test_translated_pattern_mismatch(pattern_mismatch):
+ pattern, target = pattern_mismatch
+ assert not translate_pattern(pattern).match(target)
+
+
+class TempDirTestCase(object):
+ def setup_method(self, method):
+ self.temp_dir = tempfile.mkdtemp()
+ self.old_cwd = os.getcwd()
+ os.chdir(self.temp_dir)
+
+ def teardown_method(self, method):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.temp_dir)
+
+
+class TestManifestTest(TempDirTestCase):
+ def setup_method(self, method):
+ super(TestManifestTest, self).setup_method(method)
+
+ f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
+ f.write(SETUP_PY)
+ f.close()
+ """
+ Create a file tree like:
+ - LICENSE
+ - README.rst
+ - testing.rst
+ - .hidden.rst
+ - app/
+ - __init__.py
+ - a.txt
+ - b.txt
+ - c.rst
+ - static/
+ - app.js
+ - app.js.map
+ - app.css
+ - app.css.map
+ """
+
+ for fname in ['README.rst', '.hidden.rst', 'testing.rst', 'LICENSE']:
+ touch(os.path.join(self.temp_dir, fname))
+
+ # Set up the rest of the test package
+ test_pkg = os.path.join(self.temp_dir, 'app')
+ os.mkdir(test_pkg)
+ for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
+ touch(os.path.join(test_pkg, fname))
+
+ # Some compiled front-end assets to include
+ static = os.path.join(test_pkg, 'static')
+ os.mkdir(static)
+ for fname in ['app.js', 'app.js.map', 'app.css', 'app.css.map']:
+ touch(os.path.join(static, fname))
+
+ def make_manifest(self, contents):
+ """Write a MANIFEST.in."""
+ with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w') as f:
+ f.write(DALS(contents))
+
+ def get_files(self):
+ """Run egg_info and get all the files to include, as a set"""
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = egg_info(dist)
+ cmd.ensure_finalized()
+
+ cmd.run()
+
+ return set(cmd.filelist.files)
+
+ def test_no_manifest(self):
+ """Check a missing MANIFEST.in includes only the standard files."""
+ assert (default_files - set(['MANIFEST.in'])) == self.get_files()
+
+ def test_empty_files(self):
+ """Check an empty MANIFEST.in includes only the standard files."""
+ self.make_manifest("")
+ assert default_files == self.get_files()
+
+ def test_include(self):
+ """Include extra rst files in the project root."""
+ self.make_manifest("include *.rst")
+ files = default_files | set([
+ 'testing.rst', '.hidden.rst'])
+ assert files == self.get_files()
+
+ def test_exclude(self):
+ """Include everything in app/ except the text files"""
+ l = make_local_path
+ self.make_manifest(
+ """
+ include app/*
+ exclude app/*.txt
+ """)
+ files = default_files | set([l('app/c.rst')])
+ assert files == self.get_files()
+
+ def test_include_multiple(self):
+ """Include with multiple patterns."""
+ l = make_local_path
+ self.make_manifest("include app/*.txt app/static/*")
+ files = default_files | set([
+ l('app/a.txt'), l('app/b.txt'),
+ l('app/static/app.js'), l('app/static/app.js.map'),
+ l('app/static/app.css'), l('app/static/app.css.map')])
+ assert files == self.get_files()
+
+ def test_graft(self):
+ """Include the whole app/static/ directory."""
+ l = make_local_path
+ self.make_manifest("graft app/static")
+ files = default_files | set([
+ l('app/static/app.js'), l('app/static/app.js.map'),
+ l('app/static/app.css'), l('app/static/app.css.map')])
+ assert files == self.get_files()
+
+ def test_graft_glob_syntax(self):
+ """Include the whole app/static/ directory."""
+ l = make_local_path
+ self.make_manifest("graft */static")
+ files = default_files | set([
+ l('app/static/app.js'), l('app/static/app.js.map'),
+ l('app/static/app.css'), l('app/static/app.css.map')])
+ assert files == self.get_files()
+
+ def test_graft_global_exclude(self):
+ """Exclude all *.map files in the project."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ graft app/static
+ global-exclude *.map
+ """)
+ files = default_files | set([
+ l('app/static/app.js'), l('app/static/app.css')])
+ assert files == self.get_files()
+
+ def test_global_include(self):
+ """Include all *.rst, *.js, and *.css files in the whole tree."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ global-include *.rst *.js *.css
+ """)
+ files = default_files | set([
+ '.hidden.rst', 'testing.rst', l('app/c.rst'),
+ l('app/static/app.js'), l('app/static/app.css')])
+ assert files == self.get_files()
+
+ def test_graft_prune(self):
+ """Include all files in app/, except for the whole app/static/ dir."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ graft app
+ prune app/static
+ """)
+ files = default_files | set([
+ l('app/a.txt'), l('app/b.txt'), l('app/c.rst')])
+ assert files == self.get_files()
+
+
+class TestFileListTest(TempDirTestCase):
+ """
+ A copy of the relevant bits of distutils/tests/test_filelist.py,
+ to ensure setuptools' version of FileList keeps parity with distutils.
+ """
+
+ def setup_method(self, method):
+ super(TestFileListTest, self).setup_method(method)
+ self.threshold = log.set_threshold(log.FATAL)
+ self._old_log = log.Log._log
+ log.Log._log = self._log
+ self.logs = []
+
+ def teardown_method(self, method):
+ log.set_threshold(self.threshold)
+ log.Log._log = self._old_log
+ super(TestFileListTest, self).teardown_method(method)
+
+ def _log(self, level, msg, args):
+ if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL):
+ raise ValueError('%s wrong log level' % str(level))
+ self.logs.append((level, msg, args))
+
+ def get_logs(self, *levels):
+ def _format(msg, args):
+ if len(args) == 0:
+ return msg
+ return msg % args
+ return [_format(msg, args) for level, msg, args
+ in self.logs if level in levels]
+
+ def clear_logs(self):
+ self.logs = []
+
+ def assertNoWarnings(self):
+ assert self.get_logs(log.WARN) == []
+ self.clear_logs()
+
+ def assertWarnings(self):
+ assert len(self.get_logs(log.WARN)) > 0
+ self.clear_logs()
+
+ def make_files(self, files):
+ for file in files:
+ file = os.path.join(self.temp_dir, file)
+ dirname, basename = os.path.split(file)
+ pkg_resources.py31compat.makedirs(dirname, exist_ok=True)
+ open(file, 'w').close()
+
+ def test_process_template_line(self):
+ # testing all MANIFEST.in template patterns
+ file_list = FileList()
+ l = make_local_path
+
+ # simulated file list
+ self.make_files([
+ 'foo.tmp', 'ok', 'xo', 'four.txt',
+ 'buildout.cfg',
+ # filelist does not filter out VCS directories,
+ # it's sdist that does
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('global/files.x'),
+ l('global/here.tmp'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('dir3/ok'),
+ l('dir3/sub/ok.txt'),
+ ])
+
+ MANIFEST_IN = DALS("""\
+ include ok
+ include xo
+ exclude xo
+ include foo.tmp
+ include buildout.cfg
+ global-include *.x
+ global-include *.txt
+ global-exclude *.tmp
+ recursive-include f *.oo
+ recursive-exclude global *.x
+ graft dir
+ prune dir3
+ """)
+
+ for line in MANIFEST_IN.split('\n'):
+ if not line:
+ continue
+ file_list.process_template_line(line)
+
+ wanted = [
+ 'buildout.cfg',
+ 'four.txt',
+ 'ok',
+ l('.hg/last-message.txt'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('f/o/f.oo'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ ]
+
+ file_list.sort()
+ assert file_list.files == wanted
+
+ def test_exclude_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ assert not file_list.exclude_pattern('*.py')
+
+ # return True if files match
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.py']
+ assert file_list.exclude_pattern('*.py')
+
+ # test excludes
+ file_list = FileList()
+ file_list.files = ['a.py', 'a.txt']
+ file_list.exclude_pattern('*.py')
+ file_list.sort()
+ assert file_list.files == ['a.txt']
+
+ def test_include_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ self.make_files([])
+ assert not file_list.include_pattern('*.py')
+
+ # return True if files match
+ file_list = FileList()
+ self.make_files(['a.py', 'b.txt'])
+ assert file_list.include_pattern('*.py')
+
+ # test * matches all files
+ file_list = FileList()
+ self.make_files(['a.py', 'b.txt'])
+ file_list.include_pattern('*')
+ file_list.sort()
+ assert file_list.files == ['a.py', 'b.txt']
+
+ def test_process_template_line_invalid(self):
+ # invalid lines
+ file_list = FileList()
+ for action in ('include', 'exclude', 'global-include',
+ 'global-exclude', 'recursive-include',
+ 'recursive-exclude', 'graft', 'prune', 'blarg'):
+ try:
+ file_list.process_template_line(action)
+ except DistutilsTemplateError:
+ pass
+ except Exception:
+ assert False, "Incorrect error thrown"
+ else:
+ assert False, "Should have thrown an error"
+
+ def test_include(self):
+ l = make_local_path
+ # include
+ file_list = FileList()
+ self.make_files(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('include *.py')
+ file_list.sort()
+ assert file_list.files == ['a.py']
+ self.assertNoWarnings()
+
+ file_list.process_template_line('include *.rb')
+ file_list.sort()
+ assert file_list.files == ['a.py']
+ self.assertWarnings()
+
+ def test_exclude(self):
+ l = make_local_path
+ # exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('exclude *.py')
+ file_list.sort()
+ assert file_list.files == ['b.txt', l('d/c.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('exclude *.rb')
+ file_list.sort()
+ assert file_list.files == ['b.txt', l('d/c.py')]
+ self.assertWarnings()
+
+ def test_global_include(self):
+ l = make_local_path
+ # global-include
+ file_list = FileList()
+ self.make_files(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('global-include *.py')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('d/c.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-include *.rb')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('d/c.py')]
+ self.assertWarnings()
+
+ def test_global_exclude(self):
+ l = make_local_path
+ # global-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('global-exclude *.py')
+ file_list.sort()
+ assert file_list.files == ['b.txt']
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-exclude *.rb')
+ file_list.sort()
+ assert file_list.files == ['b.txt']
+ self.assertWarnings()
+
+ def test_recursive_include(self):
+ l = make_local_path
+ # recursive-include
+ file_list = FileList()
+ self.make_files(['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')])
+
+ file_list.process_template_line('recursive-include d *.py')
+ file_list.sort()
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-include e *.py')
+ file_list.sort()
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertWarnings()
+
+ def test_recursive_exclude(self):
+ l = make_local_path
+ # recursive-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+
+ file_list.process_template_line('recursive-exclude d *.py')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('d/c.txt')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-exclude e *.py')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('d/c.txt')]
+ self.assertWarnings()
+
+ def test_graft(self):
+ l = make_local_path
+ # graft
+ file_list = FileList()
+ self.make_files(['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')])
+
+ file_list.process_template_line('graft d')
+ file_list.sort()
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('graft e')
+ file_list.sort()
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertWarnings()
+
+ def test_prune(self):
+ l = make_local_path
+ # prune
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+
+ file_list.process_template_line('prune d')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('f/f.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('prune e')
+ file_list.sort()
+ assert file_list.files == ['a.py', l('f/f.py')]
+ self.assertWarnings()
diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py
new file mode 100644
index 0000000..32d7a90
--- /dev/null
+++ b/setuptools/tests/test_msvc.py
@@ -0,0 +1,178 @@
+"""
+Tests for msvc support module.
+"""
+
+import os
+import contextlib
+import distutils.errors
+import mock
+
+import pytest
+
+from . import contexts
+
+# importing only setuptools should apply the patch
+__import__('setuptools')
+
+pytest.importorskip("distutils.msvc9compiler")
+
+
+def mock_reg(hkcu=None, hklm=None):
+ """
+ Return a mock for distutils.msvc9compiler.Reg, patched
+ to mock out the functions that access the registry.
+ """
+
+ _winreg = getattr(distutils.msvc9compiler, '_winreg', None)
+ winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
+
+ hives = {
+ winreg.HKEY_CURRENT_USER: hkcu or {},
+ winreg.HKEY_LOCAL_MACHINE: hklm or {},
+ }
+
+ @classmethod
+ def read_keys(cls, base, key):
+ """Return list of registry keys."""
+ hive = hives.get(base, {})
+ return [
+ k.rpartition('\\')[2]
+ for k in hive if k.startswith(key.lower())
+ ]
+
+ @classmethod
+ def read_values(cls, base, key):
+ """Return dict of registry keys and values."""
+ hive = hives.get(base, {})
+ return dict(
+ (k.rpartition('\\')[2], hive[k])
+ for k in hive if k.startswith(key.lower())
+ )
+
+ return mock.patch.multiple(distutils.msvc9compiler.Reg,
+ read_keys=read_keys, read_values=read_values)
+
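+# Usage sketch (hypothetical install dir): hive keys are full, lowercase
+# registry paths, e.g.
+#
+#   with mock_reg(hkcu={TestModulePatch.key_32: r'C:\VCForPython\9.0'}):
+#       ...  # Reg.read_keys/read_values now answer from the dicts
+#
+# letting find_vcvarsall be exercised without touching the real registry.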
+
+class TestModulePatch:
+ """
+ Ensure that importing setuptools is sufficient to replace
+ the standard find_vcvarsall function with a version that
+ recognizes the "Visual C++ for Python" package.
+ """
+
+ key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
+ key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
+
+ def test_patched(self):
+ "Test the module is actually patched"
+ mod_name = distutils.msvc9compiler.find_vcvarsall.__module__
+ assert mod_name == "setuptools.msvc", "find_vcvarsall unpatched"
+
+ def test_no_registry_entries_means_nothing_found(self):
+ """
+ With no registry entries and no VS90COMNTOOLS environment variable,
+ the lookup should fail with an error directing the user to download
+ vcpython27.
+ """
+ find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
+ query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
+
+ with contexts.environment(VS90COMNTOOLS=None):
+ with mock_reg():
+ assert find_vcvarsall(9.0) is None
+
+ try:
+ query_vcvarsall(9.0)
+ except Exception as exc:
+ expected = distutils.errors.DistutilsPlatformError
+ assert isinstance(exc, expected)
+ assert 'aka.ms/vcpython27' in str(exc)
+
+ @pytest.yield_fixture
+ def user_preferred_setting(self):
+ """
+ Set up environment with different install dirs for user vs. system
+ and yield the user_install_dir for the expected result.
+ """
+ with self.mock_install_dir() as user_install_dir:
+ with self.mock_install_dir() as system_install_dir:
+ reg = mock_reg(
+ hkcu={
+ self.key_32: user_install_dir,
+ },
+ hklm={
+ self.key_32: system_install_dir,
+ self.key_64: system_install_dir,
+ },
+ )
+ with reg:
+ yield user_install_dir
+
+ def test_prefer_current_user(self, user_preferred_setting):
+ """
+ Ensure user's settings are preferred.
+ """
+ result = distutils.msvc9compiler.find_vcvarsall(9.0)
+ expected = os.path.join(user_preferred_setting, 'vcvarsall.bat')
+ assert expected == result
+
+ @pytest.yield_fixture
+ def local_machine_setting(self):
+ """
+ Set up environment with only the system environment configured.
+ """
+ with self.mock_install_dir() as system_install_dir:
+ reg = mock_reg(
+ hklm={
+ self.key_32: system_install_dir,
+ },
+ )
+ with reg:
+ yield system_install_dir
+
+ def test_local_machine_recognized(self, local_machine_setting):
+ """
+ Ensure machine setting is honored if user settings are not present.
+ """
+ result = distutils.msvc9compiler.find_vcvarsall(9.0)
+ expected = os.path.join(local_machine_setting, 'vcvarsall.bat')
+ assert expected == result
+
+ @pytest.yield_fixture
+ def x64_preferred_setting(self):
+ """
+ Set up environment with 64-bit and 32-bit system settings configured
+ and yield the canonical location.
+ """
+ with self.mock_install_dir() as x32_dir:
+ with self.mock_install_dir() as x64_dir:
+ reg = mock_reg(
+ hklm={
+ # This *should* only exist on 32-bit machines
+ self.key_32: x32_dir,
+ # This *should* only exist on 64-bit machines
+ self.key_64: x64_dir,
+ },
+ )
+ with reg:
+ yield x32_dir
+
+ def test_ensure_64_bit_preferred(self, x64_preferred_setting):
+ """
+ Ensure 64-bit system key is preferred.
+ """
+ result = distutils.msvc9compiler.find_vcvarsall(9.0)
+ expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat')
+ assert expected == result
+
+ @staticmethod
+ @contextlib.contextmanager
+ def mock_install_dir():
+ """
+ Make a mock install dir in a unique location so that tests can
+ distinguish which dir was detected in a given scenario.
+ """
+ with contexts.tempdir() as result:
+ vcvarsall = os.path.join(result, 'vcvarsall.bat')
+ with open(vcvarsall, 'w'):
+ pass
+ yield result
diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py
new file mode 100644
index 0000000..1ac1b35
--- /dev/null
+++ b/setuptools/tests/test_namespaces.py
@@ -0,0 +1,111 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+import subprocess
+
+import pytest
+
+from . import namespaces
+from setuptools.command import test
+
+
+class TestNamespaces:
+
+ @pytest.mark.xfail(sys.version_info < (3, 5),
+ reason="Requires importlib.util.module_from_spec")
+ @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
+ reason="https://github.com/pypa/setuptools/issues/851")
+ def test_mixed_site_and_non_site(self, tmpdir):
+ """
+ Installing two packages sharing the same namespace, one installed
+ to a site dir and the other installed just to a path on PYTHONPATH
+ should leave the namespace intact and both packages reachable by
+ import.
+ """
+ pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+ pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+ site_packages = tmpdir / 'site-packages'
+ path_packages = tmpdir / 'path-packages'
+ targets = site_packages, path_packages
+ # use pip to install to the target directory
+ install_cmd = [
+ sys.executable,
+ '-m',
+ 'pip.__main__',
+ 'install',
+ str(pkg_A),
+ '-t', str(site_packages),
+ ]
+ subprocess.check_call(install_cmd)
+ namespaces.make_site_dir(site_packages)
+ install_cmd = [
+ sys.executable,
+ '-m',
+ 'pip.__main__',
+ 'install',
+ str(pkg_B),
+ '-t', str(path_packages),
+ ]
+ subprocess.check_call(install_cmd)
+ try_import = [
+ sys.executable,
+ '-c', 'import myns.pkgA; import myns.pkgB',
+ ]
+ with test.test.paths_on_pythonpath(map(str, targets)):
+ subprocess.check_call(try_import)
+
+ @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
+ reason="https://github.com/pypa/setuptools/issues/851")
+ def test_pkg_resources_import(self, tmpdir):
+ """
+ Ensure that a namespace package doesn't break on import
+ of pkg_resources.
+ """
+ pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+ target = tmpdir / 'packages'
+ target.mkdir()
+ install_cmd = [
+ sys.executable,
+ '-m', 'easy_install',
+ '-d', str(target),
+ str(pkg),
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(install_cmd)
+ namespaces.make_site_dir(target)
+ try_import = [
+ sys.executable,
+ '-c', 'import pkg_resources',
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(try_import)
+
+ @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
+ reason="https://github.com/pypa/setuptools/issues/851")
+ def test_namespace_package_installed_and_cwd(self, tmpdir):
+ """
+ Installing a namespace package that is also present in the current
+ working directory: only one version should take precedence.
+ """
+ pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+ target = tmpdir / 'packages'
+ # use pip to install to the target directory
+ install_cmd = [
+ sys.executable,
+ '-m',
+ 'pip.__main__',
+ 'install',
+ str(pkg_A),
+ '-t', str(target),
+ ]
+ subprocess.check_call(install_cmd)
+ namespaces.make_site_dir(target)
+
+ # ensure that package imports and pkg_resources imports
+ pkg_resources_imp = [
+ sys.executable,
+ '-c', 'import pkg_resources; import myns.pkgA',
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
new file mode 100644
index 0000000..63b9294
--- /dev/null
+++ b/setuptools/tests/test_packageindex.py
@@ -0,0 +1,275 @@
+from __future__ import absolute_import
+
+import sys
+import os
+import distutils.errors
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import urllib, http_client
+
+import pkg_resources
+import setuptools.package_index
+from setuptools.tests.server import IndexServer
+from .textwrap import DALS
+
+
+class TestPackageIndex:
+ def test_regex(self):
+ hash_url = 'http://other_url?:action=show_md5&amp;'
+ hash_url += 'digest=0123456789abcdef0123456789abcdef'
+ doc = """
+ <a href="http://some_url">Name</a>
+ (<a title="MD5 hash"
+ href="{hash_url}">md5</a>)
+ """.lstrip().format(**locals())
+ assert setuptools.package_index.PYPI_MD5.match(doc)
+
+ def test_bad_url_bad_port(self):
+ index = setuptools.package_index.PackageIndex()
+ url = 'http://127.0.0.1:0/nonesuch/test_package_index'
+ try:
+ v = index.open_url(url)
+ except Exception as v:
+ assert url in str(v)
+ else:
+ assert isinstance(v, urllib.error.HTTPError)
+
+ def test_bad_url_typo(self):
+ # issue 16
+ # easy_install inquant.contentmirror.plone breaks because of a typo
+ # in its home URL
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+
+ url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+ try:
+ v = index.open_url(url)
+ except Exception as v:
+ assert url in str(v)
+ else:
+ assert isinstance(v, urllib.error.HTTPError)
+
+ def test_bad_url_bad_status_line(self):
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+
+ def _urlopen(*args):
+ raise http_client.BadStatusLine('line')
+
+ index.opener = _urlopen
+ url = 'http://example.com'
+ try:
+ v = index.open_url(url)
+ except Exception as v:
+ assert 'line' in str(v)
+ else:
+ raise AssertionError('Should have raised here!')
+
+ def test_bad_url_double_scheme(self):
+ """
+ A bad URL with a double scheme should raise a DistutilsError.
+ """
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+
+ # issue 20
+ url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+ try:
+ index.open_url(url)
+ except distutils.errors.DistutilsError as error:
+ msg = six.text_type(error)
+ assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
+ return
+ raise RuntimeError("Did not raise")
+
+ def test_bad_url_screwy_href(self):
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+
+ # issue #160
+ if sys.version_info[0] == 2 and sys.version_info[1] == 7:
+ # this should not fail
+ url = 'http://example.com'
+ page = ('<a href="http://www.famfamfam.com]('
+ 'http://www.famfamfam.com/">')
+ index.process_index(url, page)
+
+ def test_url_ok(self):
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+ url = 'file:///tmp/test_package_index'
+ assert index.url_ok(url, True)
+
+ def test_links_priority(self):
+ """
+ Download links from the pypi simple index should be used before
+ external download links.
+ https://bitbucket.org/tarek/distribute/issue/163
+
+ Use case:
+ - someone uploads a package on pypi, an md5 is generated
+ - someone manually copies this link (with the md5 in the url) onto an
+ external page accessible from the package page.
+ - someone reuploads the package (with a different md5)
+ - while easy_installing, an MD5 error occurs because the external link
+ is used
+ -> Setuptools should use the link from pypi, not the external one.
+ """
+ if sys.platform.startswith('java'):
+ # Skip this test on jython because binding to :0 fails
+ return
+
+ # start an index server
+ server = IndexServer()
+ server.start()
+ index_url = server.base_url() + 'test_links_priority/simple/'
+
+ # scan a test index
+ pi = setuptools.package_index.PackageIndex(index_url)
+ requirement = pkg_resources.Requirement.parse('foobar')
+ pi.find_packages(requirement)
+ server.stop()
+
+ # the distribution has been found
+ assert 'foobar' in pi
+ # we have only one link, because links are compared without md5
+ assert len(pi['foobar']) == 1
+ # the link should be from the index
+ assert 'correct_md5' in pi['foobar'][0].location
+
+ def test_parse_bdist_wininst(self):
+ parse = setuptools.package_index.parse_bdist_wininst
+
+ actual = parse('reportlab-2.5.win32-py2.4.exe')
+ expected = 'reportlab-2.5', '2.4', 'win32'
+ assert actual == expected
+
+ actual = parse('reportlab-2.5.win32.exe')
+ expected = 'reportlab-2.5', None, 'win32'
+ assert actual == expected
+
+ actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
+ expected = 'reportlab-2.5', '2.7', 'win-amd64'
+ assert actual == expected
+
+ actual = parse('reportlab-2.5.win-amd64.exe')
+ expected = 'reportlab-2.5', None, 'win-amd64'
+ assert actual == expected
+
+ def test__vcs_split_rev_from_url(self):
+ """
+ Test the basic usage of _vcs_split_rev_from_url
+ """
+ vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
+ url, rev = vsrfu('https://example.com/bar@2995')
+ assert url == 'https://example.com/bar'
+ assert rev == '2995'
+
+ def test_local_index(self, tmpdir):
+ """
+ local_open should be able to read an index from the file system.
+ """
+ index_file = tmpdir / 'index.html'
+ with index_file.open('w') as f:
+ f.write('<div>content</div>')
+ url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/'
+ res = setuptools.package_index.local_open(url)
+ assert 'content' in res.read()
+
+ def test_egg_fragment(self):
+ """
+ EGG fragments must comply with PEP 440
+ """
+ epoch = [
+ '',
+ '1!',
+ ]
+ releases = [
+ '0',
+ '0.0',
+ '0.0.0',
+ ]
+ pre = [
+ 'a0',
+ 'b0',
+ 'rc0',
+ ]
+ post = [
+ '.post0'
+ ]
+ dev = [
+ '.dev0',
+ ]
+ local = [
+ ('', ''),
+ ('+ubuntu.0', '+ubuntu.0'),
+ ('+ubuntu-0', '+ubuntu.0'),
+ ('+ubuntu_0', '+ubuntu.0'),
+ ]
+ versions = [
+ [''.join([e, r, p, l]) for l in ll]
+ for e in epoch
+ for r in releases
+ for p in sum([pre, post, dev], [''])
+ for ll in local]
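+ # each entry pairs a raw version with its canonical form, e.g.
+ # ['1!0.0a0+ubuntu-0', '1!0.0a0+ubuntu.0'] (illustrative)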
+ for v, vc in versions:
+ dists = list(setuptools.package_index.distros_for_url(
+ 'http://example.com/example.zip#egg=example-' + v))
+ assert dists[0].version == ''
+ assert dists[1].version == vc
+
+
+class TestContentCheckers:
+ def test_md5(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ checker.feed('You should probably not be using MD5'.encode('ascii'))
+ assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
+ assert checker.is_valid()
+
+ def test_other_fragment(self):
+ "Content checks should succeed silently if no hash is present"
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#something%20completely%20different')
+ checker.feed('anything'.encode('ascii'))
+ assert checker.is_valid()
+
+ def test_blank_md5(self):
+ "Content checks should succeed if a hash is empty"
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=')
+ checker.feed('anything'.encode('ascii'))
+ assert checker.is_valid()
+
+ def test_get_hash_name_md5(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ assert checker.hash_name == 'md5'
+
+ def test_report(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ rep = checker.report(lambda x: x, 'My message about %s')
+ assert rep == 'My message about md5'
+
+
+class TestPyPIConfig:
+ def test_percent_in_password(self, tmpdir, monkeypatch):
+ monkeypatch.setitem(os.environ, 'HOME', str(tmpdir))
+ pypirc = tmpdir / '.pypirc'
+ with pypirc.open('w') as strm:
+ strm.write(DALS("""
+ [pypi]
+ repository=https://pypi.org
+ username=jaraco
+ password=pity%
+ """))
+ cfg = setuptools.package_index.PyPIConfig()
+ cred = cfg.creds_by_repository['https://pypi.org']
+ assert cred.username == 'jaraco'
+ assert cred.password == 'pity%'
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
new file mode 100644
index 0000000..d867542
--- /dev/null
+++ b/setuptools/tests/test_sandbox.py
@@ -0,0 +1,133 @@
+"""develop tests
+"""
+import os
+import types
+
+import pytest
+
+import pkg_resources
+import setuptools.sandbox
+
+
+class TestSandbox:
+ def test_devnull(self, tmpdir):
+ with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
+ self._file_writer(os.devnull)
+
+ @staticmethod
+ def _file_writer(path):
+ def do_write():
+ with open(path, 'w') as f:
+ f.write('xxx')
+
+ return do_write
+
+ def test_setup_py_with_BOM(self):
+ """
+ It should be possible to execute a setup.py with a Byte Order Mark
+ """
+ target = pkg_resources.resource_filename(__name__,
+ 'script-with-bom.py')
+ namespace = types.ModuleType('namespace')
+ setuptools.sandbox._execfile(target, vars(namespace))
+ assert namespace.result == 'passed'
+
+ def test_setup_py_with_CRLF(self, tmpdir):
+ setup_py = tmpdir / 'setup.py'
+ with setup_py.open('wb') as stream:
+ stream.write(b'"degenerate script"\r\n')
+ setuptools.sandbox._execfile(str(setup_py), globals())
+
+
+class TestExceptionSaver:
+ def test_exception_trapped(self):
+ with setuptools.sandbox.ExceptionSaver():
+ raise ValueError("details")
+
+ def test_exception_resumed(self):
+ with setuptools.sandbox.ExceptionSaver() as saved_exc:
+ raise ValueError("details")
+
+ with pytest.raises(ValueError) as caught:
+ saved_exc.resume()
+
+ assert isinstance(caught.value, ValueError)
+ assert str(caught.value) == 'details'
+
+ def test_exception_reconstructed(self):
+ orig_exc = ValueError("details")
+
+ with setuptools.sandbox.ExceptionSaver() as saved_exc:
+ raise orig_exc
+
+ with pytest.raises(ValueError) as caught:
+ saved_exc.resume()
+
+ assert isinstance(caught.value, ValueError)
+ assert caught.value is not orig_exc
+
+ def test_no_exception_passes_quietly(self):
+ with setuptools.sandbox.ExceptionSaver() as saved_exc:
+ pass
+
+ saved_exc.resume()
+
+ def test_unpickleable_exception(self):
+ class CantPickleThis(Exception):
+ "This Exception is unpickleable because it's not in globals"
+ def __repr__(self):
+ return 'CantPickleThis%r' % (self.args,)
+
+ with setuptools.sandbox.ExceptionSaver() as saved_exc:
+ raise CantPickleThis('detail')
+
+ with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
+ saved_exc.resume()
+
+ assert str(caught.value) == "CantPickleThis('detail',)"
+
+ def test_unpickleable_exception_when_hiding_setuptools(self):
+ """
+ As revealed in #440, an infinite recursion can occur if an unpickleable
+ exception is raised while setuptools is hidden. Ensure this doesn't happen.
+ """
+
+ class ExceptionUnderTest(Exception):
+ """
+ An unpickleable exception (not in globals).
+ """
+
+ with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
+ with setuptools.sandbox.save_modules():
+ setuptools.sandbox.hide_setuptools()
+ raise ExceptionUnderTest()
+
+ msg, = caught.value.args
+ assert msg == 'ExceptionUnderTest()'
+
+ def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir):
+ """
+ When in a sandbox with setuptools hidden, a SandboxViolation
+ should reflect a proper exception and not be wrapped in
+ an UnpickleableException.
+ """
+
+ def write_file():
+ "Trigger a SandboxViolation by writing outside the sandbox"
+ with open('/etc/foo', 'w'):
+ pass
+
+ with pytest.raises(setuptools.sandbox.SandboxViolation) as caught:
+ with setuptools.sandbox.save_modules():
+ setuptools.sandbox.hide_setuptools()
+ with setuptools.sandbox.DirectorySandbox(str(tmpdir)):
+ write_file()
+
+ cmd, args, kwargs = caught.value.args
+ assert cmd == 'open'
+ assert args == ('/etc/foo', 'w')
+ assert kwargs == {}
+
+ msg = str(caught.value)
+ assert 'open' in msg
+ assert "('/etc/foo', 'w')" in msg
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
new file mode 100644
index 0000000..02222da
--- /dev/null
+++ b/setuptools/tests/test_sdist.py
@@ -0,0 +1,427 @@
+# -*- coding: utf-8 -*-
+"""sdist tests"""
+
+import os
+import shutil
+import sys
+import tempfile
+import unicodedata
+import contextlib
+import io
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map
+
+import pytest
+
+import pkg_resources
+from setuptools.command.sdist import sdist
+from setuptools.command.egg_info import manifest_maker
+from setuptools.dist import Distribution
+from setuptools.tests import fail_on_ascii
+from .text import Filenames
+
+py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
+
+SETUP_ATTRS = {
+ 'name': 'sdist_test',
+ 'version': '0.0',
+ 'packages': ['sdist_test'],
+ 'package_data': {'sdist_test': ['*.txt']},
+ 'data_files': [("data", [os.path.join("d", "e.dat")])],
+}
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(**%r)
+""" % SETUP_ATTRS
+
+
+@contextlib.contextmanager
+def quiet():
+ old_stdout, old_stderr = sys.stdout, sys.stderr
+ sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
+ try:
+ yield
+ finally:
+ sys.stdout, sys.stderr = old_stdout, old_stderr
+
+
+# Convert to POSIX path
+def posix(path):
+ if six.PY3 and not isinstance(path, str):
+ return path.replace(os.sep.encode('ascii'), b'/')
+ else:
+ return path.replace(os.sep, '/')
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+ if isinstance(path, six.text_type):
+ return unicodedata.normalize('NFD', path)
+ try:
+ path = path.decode('utf-8')
+ path = unicodedata.normalize('NFD', path)
+ path = path.encode('utf-8')
+ except UnicodeError:
+ pass # Not UTF-8
+ return path
+
+
+def read_all_bytes(filename):
+ with io.open(filename, 'rb') as fp:
+ return fp.read()
+
+
+def latin1_fail():
+ try:
+ desc, filename = tempfile.mkstemp(suffix=Filenames.latin_1)
+ os.close(desc)
+ os.remove(filename)
+ except Exception:
+ return True
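+
+# latin1_fail returns None (falsy) when the file can be created, so the
+# xfail mark below applies only on systems lacking latin-1 filename support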
+
+
+fail_on_latin1_encoded_filenames = pytest.mark.xfail(
+ latin1_fail(),
+ reason="System does not support latin-1 filenames",
+)
+
+
+class TestSdistTest:
+ def setup_method(self, method):
+ self.temp_dir = tempfile.mkdtemp()
+ f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
+ f.write(SETUP_PY)
+ f.close()
+
+ # Set up the rest of the test package
+ test_pkg = os.path.join(self.temp_dir, 'sdist_test')
+ os.mkdir(test_pkg)
+ data_folder = os.path.join(self.temp_dir, "d")
+ os.mkdir(data_folder)
+ # *.rst was not included in package_data, so c.rst should not be
+ # automatically added to the manifest when not under version control
+ for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst',
+ os.path.join(data_folder, "e.dat")]:
+ # Just touch the files; their contents are irrelevant
+ open(os.path.join(test_pkg, fname), 'w').close()
+
+ self.old_cwd = os.getcwd()
+ os.chdir(self.temp_dir)
+
+ def teardown_method(self, method):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.temp_dir)
+
+ def test_package_data_in_sdist(self):
+ """Regression test for pull request #4: ensures that files listed in
+ package_data are included in the manifest even if they're not added to
+ version control.
+ """
+
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ with quiet():
+ cmd.run()
+
+ manifest = cmd.filelist.files
+ assert os.path.join('sdist_test', 'a.txt') in manifest
+ assert os.path.join('sdist_test', 'b.txt') in manifest
+ assert os.path.join('sdist_test', 'c.rst') not in manifest
+ assert os.path.join('d', 'e.dat') in manifest
+
+ def test_defaults_case_sensitivity(self):
+ """
+ Make sure default files (README.*, etc.) are added in a case-sensitive
+ way to avoid problems with packages built on Windows.
+ """
+
+ open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
+ open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
+
+ dist = Distribution(SETUP_ATTRS)
+ # the extension deliberately capitalized for this test
+ # to make sure the actual filename (not capitalized) gets added
+ # to the manifest
+ dist.script_name = 'setup.PY'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ with quiet():
+ cmd.run()
+
+ # lowercase all names so we can test in a
+ # case-insensitive way to make sure the files
+ # are not included. Use a list, not a lazy map, so that the
+ # repeated membership checks below all see the full manifest.
+ manifest = list(map(lambda x: x.lower(), cmd.filelist.files))
+ assert 'readme.rst' not in manifest, manifest
+ assert 'setup.py' not in manifest, manifest
+ assert 'setup.cfg' not in manifest, manifest
+
+ @fail_on_ascii
+ def test_manifest_is_written_with_utf8_encoding(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ mm = manifest_maker(dist)
+ mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+ os.mkdir('sdist_test.egg-info')
+
+ # UTF-8 filename
+ filename = os.path.join('sdist_test', 'smörbröd.py')
+
+ # Must create the file or it will get stripped.
+ open(filename, 'w').close()
+
+ # Add UTF-8 filename and write manifest
+ with quiet():
+ mm.run()
+ mm.filelist.append(filename)
+ mm.write_manifest()
+
+ contents = read_all_bytes(mm.manifest)
+
+ # The manifest should be UTF-8 encoded
+ u_contents = contents.decode('UTF-8')
+
+ # The manifest should contain the UTF-8 filename
+ if six.PY2:
+ fs_enc = sys.getfilesystemencoding()
+ filename = filename.decode(fs_enc)
+
+ assert posix(filename) in u_contents
+
+ @py3_only
+ @fail_on_ascii
+ def test_write_manifest_allows_utf8_filenames(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ mm = manifest_maker(dist)
+ mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+ os.mkdir('sdist_test.egg-info')
+
+ filename = os.path.join(b'sdist_test', Filenames.utf_8)
+
+ # Must touch the file or risk removal
+ open(filename, "w").close()
+
+ # Add filename and write manifest
+ with quiet():
+ mm.run()
+ u_filename = filename.decode('utf-8')
+ mm.filelist.files.append(u_filename)
+ # Re-write manifest
+ mm.write_manifest()
+
+ contents = read_all_bytes(mm.manifest)
+
+ # The manifest should be UTF-8 encoded
+ contents.decode('UTF-8')
+
+ # The manifest should contain the UTF-8 filename
+ assert posix(filename) in contents
+
+ # The filelist should have been updated as well
+ assert u_filename in mm.filelist.files
+
+ @py3_only
+ def test_write_manifest_skips_non_utf8_filenames(self):
+ """
+ Files that cannot be encoded to UTF-8 (specifically, those that
+ weren't originally successfully decoded and have surrogate
+ escapes) should be omitted from the manifest.
+ See https://bitbucket.org/tarek/distribute/issue/303 for history.
+ """
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ mm = manifest_maker(dist)
+ mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+ os.mkdir('sdist_test.egg-info')
+
+ # Latin-1 filename
+ filename = os.path.join(b'sdist_test', Filenames.latin_1)
+
+ # Add filename with surrogates and write manifest
+ with quiet():
+ mm.run()
+ u_filename = filename.decode('utf-8', 'surrogateescape')
+ mm.filelist.append(u_filename)
+ # Re-write manifest
+ mm.write_manifest()
+
+ contents = read_all_bytes(mm.manifest)
+
+ # The manifest should be UTF-8 encoded
+ contents.decode('UTF-8')
+
+ # The Latin-1 filename should have been skipped
+ assert posix(filename) not in contents
+
+ # The filelist should have been updated as well
+ assert u_filename not in mm.filelist.files
+
+ @fail_on_ascii
+ def test_manifest_is_read_with_utf8_encoding(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ # Create manifest
+ with quiet():
+ cmd.run()
+
+ # Add UTF-8 filename to manifest
+ filename = os.path.join(b'sdist_test', Filenames.utf_8)
+ cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+ manifest = open(cmd.manifest, 'ab')
+ manifest.write(b'\n' + filename)
+ manifest.close()
+
+ # The file must exist to be included in the filelist
+ open(filename, 'w').close()
+
+ # Re-read manifest
+ cmd.filelist.files = []
+ with quiet():
+ cmd.read_manifest()
+
+ # The filelist should contain the UTF-8 filename
+ if six.PY3:
+ filename = filename.decode('utf-8')
+ assert filename in cmd.filelist.files
+
+ @py3_only
+ @fail_on_latin1_encoded_filenames
+ def test_read_manifest_skips_non_utf8_filenames(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ # Create manifest
+ with quiet():
+ cmd.run()
+
+ # Add Latin-1 filename to manifest
+ filename = os.path.join(b'sdist_test', Filenames.latin_1)
+ cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+ manifest = open(cmd.manifest, 'ab')
+ manifest.write(b'\n' + filename)
+ manifest.close()
+
+ # The file must exist to be included in the filelist
+ open(filename, 'w').close()
+
+ # Re-read manifest
+ cmd.filelist.files = []
+ with quiet():
+ cmd.read_manifest()
+
+ # The Latin-1 filename should have been skipped
+ filename = filename.decode('latin-1')
+ assert filename not in cmd.filelist.files
+
+ @fail_on_ascii
+ @fail_on_latin1_encoded_filenames
+ def test_sdist_with_utf8_encoded_filename(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ filename = os.path.join(b'sdist_test', Filenames.utf_8)
+ open(filename, 'w').close()
+
+ with quiet():
+ cmd.run()
+
+ if sys.platform == 'darwin':
+ filename = decompose(filename)
+
+ if six.PY3:
+ fs_enc = sys.getfilesystemencoding()
+
+ if sys.platform == 'win32':
+ if fs_enc == 'cp1252':
+ # Python 3 mangles the UTF-8 filename
+ filename = filename.decode('cp1252')
+ assert filename in cmd.filelist.files
+ else:
+ filename = filename.decode('mbcs')
+ assert filename in cmd.filelist.files
+ else:
+ filename = filename.decode('utf-8')
+ assert filename in cmd.filelist.files
+ else:
+ assert filename in cmd.filelist.files
+
+ @fail_on_latin1_encoded_filenames
+ def test_sdist_with_latin1_encoded_filename(self):
+ # Test for #303.
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ # Latin-1 filename
+ filename = os.path.join(b'sdist_test', Filenames.latin_1)
+ open(filename, 'w').close()
+ assert os.path.isfile(filename)
+
+ with quiet():
+ cmd.run()
+
+ if six.PY3:
+ # not all Windows systems have a default FS encoding of cp1252
+ if sys.platform == 'win32':
+ # Latin-1 is similar to Windows-1252; however, on an
+ # mbcs filesystem the name is not in latin-1 encoding
+ fs_enc = sys.getfilesystemencoding()
+ if fs_enc != 'mbcs':
+ fs_enc = 'latin-1'
+ filename = filename.decode(fs_enc)
+
+ assert filename in cmd.filelist.files
+ else:
+ # The Latin-1 filename should have been skipped
+ filename = filename.decode('latin-1')
+ assert filename not in cmd.filelist.files
+ else:
+ # Under Python 2 there seems to be no decoded string in the
+ # filelist. However, due to the decoding and encoding of the
+ # file name to produce the UTF-8 manifest, the latin-1 name
+ # may be excluded.
+ try:
+ # fs_enc should match how the decoding is expected to
+ # be performed for the manifest output.
+ fs_enc = sys.getfilesystemencoding()
+ filename.decode(fs_enc)
+ assert filename in cmd.filelist.files
+ except UnicodeDecodeError:
+ assert filename not in cmd.filelist.files
+
+
+def test_default_revctrl():
+ """
+ When _default_revctrl was removed from the `setuptools.command.sdist`
+ module in 10.0, it broke some systems which keep an old install of
+ setuptools (Distribute) around. Those old versions require that the
+ setuptools package continue to implement that interface, so this
+ function provides that interface, stubbed. See #320 for details.
+
+ This interface must be maintained until Ubuntu 12.04 is no longer
+ supported (by Setuptools).
+ """
+ ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
+ ep = pkg_resources.EntryPoint.parse(ep_def)
+ res = ep.resolve()
+ assert hasattr(res, '__iter__')
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
new file mode 100644
index 0000000..26e37a6
--- /dev/null
+++ b/setuptools/tests/test_setuptools.py
@@ -0,0 +1,368 @@
+"""Tests for the 'setuptools' package"""
+
+import sys
+import os
+import distutils.core
+import distutils.cmd
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+from distutils.core import Extension
+from distutils.version import LooseVersion
+
+import pytest
+
+import setuptools
+import setuptools.dist
+import setuptools.depends as dep
+from setuptools import Feature
+from setuptools.depends import Require
+from setuptools.extern import six
+
+
+def makeSetup(**args):
+ """Return distribution from 'setup(**args)', without executing commands"""
+
+ distutils.core._setup_stop_after = "commandline"
+
+ # Don't let system command line leak into tests!
+ args.setdefault('script_args', ['install'])
+
+ try:
+ return setuptools.setup(**args)
+ finally:
+ distutils.core._setup_stop_after = None
+
+
+needs_bytecode = pytest.mark.skipif(
+ not hasattr(dep, 'get_module_constant'),
+ reason="bytecode support not available",
+)
+
+
+class TestDepends:
+ def testExtractConst(self):
+ if not hasattr(dep, 'extract_constant'):
+ # skip on non-bytecode platforms
+ return
+
+ def f1():
+ global x, y, z
+ x = "test"
+ y = z
+
+ fc = six.get_function_code(f1)
+
+ # unrecognized name
+ assert dep.extract_constant(fc, 'q', -1) is None
+
+ # constant assigned
+ assert dep.extract_constant(fc, 'x', -1) == "test"
+
+ # expression assigned
+ assert dep.extract_constant(fc, 'y', -1) == -1
+
+ # recognized name, not assigned
+ assert dep.extract_constant(fc, 'z', -1) is None
+
+ def testFindModule(self):
+ with pytest.raises(ImportError):
+ dep.find_module('no-such.-thing')
+ with pytest.raises(ImportError):
+ dep.find_module('setuptools.non-existent')
+ f, p, i = dep.find_module('setuptools.tests')
+ f.close()
+
+ @needs_bytecode
+ def testModuleExtract(self):
+ from json import __version__
+ assert dep.get_module_constant('json', '__version__') == __version__
+ assert dep.get_module_constant('sys', 'version') == sys.version
+ assert dep.get_module_constant('setuptools.tests.test_setuptools', '__doc__') == __doc__
+
+ @needs_bytecode
+ def testRequire(self):
+ req = Require('Json', '1.0.3', 'json')
+
+ assert req.name == 'Json'
+ assert req.module == 'json'
+ assert req.requested_version == '1.0.3'
+ assert req.attribute == '__version__'
+ assert req.full_name() == 'Json-1.0.3'
+
+ from json import __version__
+ assert req.get_version() == __version__
+ assert req.version_ok('1.0.9')
+ assert not req.version_ok('0.9.1')
+ assert not req.version_ok('unknown')
+
+ assert req.is_present()
+ assert req.is_current()
+
+ req = Require('Json 3000', '03000', 'json', format=LooseVersion)
+ assert req.is_present()
+ assert not req.is_current()
+ assert not req.version_ok('unknown')
+
+ req = Require('Do-what-I-mean', '1.0', 'd-w-i-m')
+ assert not req.is_present()
+ assert not req.is_current()
+
+ req = Require('Tests', None, 'tests', homepage="http://example.com")
+ assert req.format is None
+ assert req.attribute is None
+ assert req.requested_version is None
+ assert req.full_name() == 'Tests'
+ assert req.homepage == 'http://example.com'
+
+ from setuptools.tests import __path__
+ paths = [os.path.dirname(p) for p in __path__]
+ assert req.is_present(paths)
+ assert req.is_current(paths)
+
+
+class TestDistro:
+ def setup_method(self, method):
+ self.e1 = Extension('bar.ext', ['bar.c'])
+ self.e2 = Extension('c.y', ['y.c'])
+
+ self.dist = makeSetup(
+ packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
+ py_modules=['b.d', 'x'],
+ ext_modules=(self.e1, self.e2),
+ package_dir={},
+ )
+
+ def testDistroType(self):
+ assert isinstance(self.dist, setuptools.dist.Distribution)
+
+ def testExcludePackage(self):
+ self.dist.exclude_package('a')
+ assert self.dist.packages == ['b', 'c']
+
+ self.dist.exclude_package('b')
+ assert self.dist.packages == ['c']
+ assert self.dist.py_modules == ['x']
+ assert self.dist.ext_modules == [self.e1, self.e2]
+
+ self.dist.exclude_package('c')
+ assert self.dist.packages == []
+ assert self.dist.py_modules == ['x']
+ assert self.dist.ext_modules == [self.e1]
+
+ # test removals from unspecified options
+ makeSetup().exclude_package('x')
+
+ def testIncludeExclude(self):
+ # remove an extension
+ self.dist.exclude(ext_modules=[self.e1])
+ assert self.dist.ext_modules == [self.e2]
+
+ # add it back in
+ self.dist.include(ext_modules=[self.e1])
+ assert self.dist.ext_modules == [self.e2, self.e1]
+
+ # should not add duplicate
+ self.dist.include(ext_modules=[self.e1])
+ assert self.dist.ext_modules == [self.e2, self.e1]
+
+ def testExcludePackages(self):
+ self.dist.exclude(packages=['c', 'b', 'a'])
+ assert self.dist.packages == []
+ assert self.dist.py_modules == ['x']
+ assert self.dist.ext_modules == [self.e1]
+
+ def testEmpty(self):
+ dist = makeSetup()
+ dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+ dist = makeSetup()
+ dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+
+ def testContents(self):
+ assert self.dist.has_contents_for('a')
+ self.dist.exclude_package('a')
+ assert not self.dist.has_contents_for('a')
+
+ assert self.dist.has_contents_for('b')
+ self.dist.exclude_package('b')
+ assert not self.dist.has_contents_for('b')
+
+ assert self.dist.has_contents_for('c')
+ self.dist.exclude_package('c')
+ assert not self.dist.has_contents_for('c')
+
+ def testInvalidIncludeExclude(self):
+ with pytest.raises(DistutilsSetupError):
+ self.dist.include(nonexistent_option='x')
+ with pytest.raises(DistutilsSetupError):
+ self.dist.exclude(nonexistent_option='x')
+ with pytest.raises(DistutilsSetupError):
+ self.dist.include(packages={'x': 'y'})
+ with pytest.raises(DistutilsSetupError):
+ self.dist.exclude(packages={'x': 'y'})
+ with pytest.raises(DistutilsSetupError):
+ self.dist.include(ext_modules={'x': 'y'})
+ with pytest.raises(DistutilsSetupError):
+ self.dist.exclude(ext_modules={'x': 'y'})
+
+ with pytest.raises(DistutilsSetupError):
+ self.dist.include(package_dir=['q'])
+ with pytest.raises(DistutilsSetupError):
+ self.dist.exclude(package_dir=['q'])
+
+
+class TestFeatures:
+ def setup_method(self, method):
+ self.req = Require('Distutils', '1.0.3', 'distutils')
+ self.dist = makeSetup(
+ features={
+ 'foo': Feature("foo", standard=True, require_features=['baz', self.req]),
+ 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
+ py_modules=['bar_et'], remove=['bar.ext'],
+ ),
+ 'baz': Feature(
+ "baz", optional=False, packages=['pkg.baz'],
+ scripts=['scripts/baz_it'],
+ libraries=[('libfoo', 'foo/foofoo.c')]
+ ),
+ 'dwim': Feature("DWIM", available=False, remove='bazish'),
+ },
+ script_args=['--without-bar', 'install'],
+ packages=['pkg.bar', 'pkg.foo'],
+ py_modules=['bar_et', 'bazish'],
+ ext_modules=[Extension('bar.ext', ['bar.c'])]
+ )
+
+ def testDefaults(self):
+ assert not Feature(
+ "test", standard=True, remove='x', available=False
+ ).include_by_default()
+ assert Feature("test", standard=True, remove='x').include_by_default()
+ # Feature must have either kwargs, removes, or require_features
+ with pytest.raises(DistutilsSetupError):
+ Feature("test")
+
+ def testAvailability(self):
+ with pytest.raises(DistutilsPlatformError):
+ self.dist.features['dwim'].include_in(self.dist)
+
+ def testFeatureOptions(self):
+ dist = self.dist
+ assert (
+ ('with-dwim', None, 'include DWIM') in dist.feature_options
+ )
+ assert (
+ ('without-dwim', None, 'exclude DWIM (default)') in dist.feature_options
+ )
+ assert (
+ ('with-bar', None, 'include bar (default)') in dist.feature_options
+ )
+ assert (
+ ('without-bar', None, 'exclude bar') in dist.feature_options
+ )
+ assert dist.feature_negopt['without-foo'] == 'with-foo'
+ assert dist.feature_negopt['without-bar'] == 'with-bar'
+ assert dist.feature_negopt['without-dwim'] == 'with-dwim'
+ assert ('without-baz' not in dist.feature_negopt)
+
+ def testUseFeatures(self):
+ dist = self.dist
+ assert dist.with_foo == 1
+ assert dist.with_bar == 0
+ assert dist.with_baz == 1
+ assert ('bar_et' not in dist.py_modules)
+ assert ('pkg.bar' not in dist.packages)
+ assert ('pkg.baz' in dist.packages)
+ assert ('scripts/baz_it' in dist.scripts)
+ assert (('libfoo', 'foo/foofoo.c') in dist.libraries)
+ assert dist.ext_modules == []
+ assert dist.require_features == [self.req]
+
+ # If we ask for bar, it should fail because we explicitly disabled
+ # it on the command line
+ with pytest.raises(DistutilsOptionError):
+ dist.include_feature('bar')
+
+ def testFeatureWithInvalidRemove(self):
+ with pytest.raises(SystemExit):
+ makeSetup(features={'x': Feature('x', remove='y')})
+
+
+class TestCommandTests:
+ def testTestIsCommand(self):
+ test_cmd = makeSetup().get_command_obj('test')
+ assert (isinstance(test_cmd, distutils.cmd.Command))
+
+ def testLongOptSuiteWNoDefault(self):
+ ts1 = makeSetup(script_args=['test', '--test-suite=foo.tests.suite'])
+ ts1 = ts1.get_command_obj('test')
+ ts1.ensure_finalized()
+ assert ts1.test_suite == 'foo.tests.suite'
+
+ def testDefaultSuite(self):
+ ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
+ ts2.ensure_finalized()
+ assert ts2.test_suite == 'bar.tests.suite'
+
+ def testDefaultWModuleOnCmdLine(self):
+ ts3 = makeSetup(
+ test_suite='bar.tests',
+ script_args=['test', '-m', 'foo.tests']
+ ).get_command_obj('test')
+ ts3.ensure_finalized()
+ assert ts3.test_module == 'foo.tests'
+ assert ts3.test_suite == 'foo.tests.test_suite'
+
+ def testConflictingOptions(self):
+ ts4 = makeSetup(
+ script_args=['test', '-m', 'bar.tests', '-s', 'foo.tests.suite']
+ ).get_command_obj('test')
+ with pytest.raises(DistutilsOptionError):
+ ts4.ensure_finalized()
+
+ def testNoSuite(self):
+ ts5 = makeSetup().get_command_obj('test')
+ ts5.ensure_finalized()
+ assert ts5.test_suite is None
+
+
+@pytest.fixture
+def example_source(tmpdir):
+ tmpdir.mkdir('foo')
+ (tmpdir / 'foo/bar.py').write('')
+ (tmpdir / 'readme.txt').write('')
+ return tmpdir
+
+
+def test_findall(example_source):
+ found = list(setuptools.findall(str(example_source)))
+ expected = ['readme.txt', 'foo/bar.py']
+ expected = [example_source.join(fn) for fn in expected]
+ assert found == expected
+
+
+def test_findall_curdir(example_source):
+ with example_source.as_cwd():
+ found = list(setuptools.findall())
+ expected = ['readme.txt', os.path.join('foo', 'bar.py')]
+ assert found == expected
+
+
+@pytest.fixture
+def can_symlink(tmpdir):
+ """
+ Skip if cannot create a symbolic link
+ """
+ link_fn = 'link'
+ target_fn = 'target'
+ try:
+ os.symlink(target_fn, link_fn)
+ except (OSError, NotImplementedError, AttributeError):
+ pytest.skip("Cannot create symbolic links")
+ os.remove(link_fn)
+
+
+def test_findall_missing_symlink(tmpdir, can_symlink):
+ with tmpdir.as_cwd():
+ os.symlink('foo', 'bar')
+ found = list(setuptools.findall())
+ assert found == []
diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py
new file mode 100644
index 0000000..960527b
--- /dev/null
+++ b/setuptools/tests/test_test.py
@@ -0,0 +1,131 @@
+# -*- coding: UTF-8 -*-
+
+from __future__ import unicode_literals
+
+from distutils import log
+import os
+import sys
+
+import pytest
+
+from setuptools.command.test import test
+from setuptools.dist import Distribution
+
+from .textwrap import DALS
+from . import contexts
+
+SETUP_PY = DALS("""
+ from setuptools import setup
+
+ setup(name='foo',
+ packages=['name', 'name.space', 'name.space.tests'],
+ namespace_packages=['name'],
+ test_suite='name.space.tests.test_suite',
+ )
+ """)
+
+NS_INIT = DALS("""
+ # -*- coding: Latin-1 -*-
+ # Söme Arbiträry Ünicode to test Distribute Issüé 310
+ try:
+ __import__('pkg_resources').declare_namespace(__name__)
+ except ImportError:
+ from pkgutil import extend_path
+ __path__ = extend_path(__path__, __name__)
+ """)
+
+TEST_PY = DALS("""
+ import unittest
+
+ class TestTest(unittest.TestCase):
+ def test_test(self):
+ print "Foo" # Should fail under Python 3 unless 2to3 is used
+
+ test_suite = unittest.makeSuite(TestTest)
+ """)
+
+
+@pytest.fixture
+def sample_test(tmpdir_cwd):
+ os.makedirs('name/space/tests')
+
+ # setup.py
+ with open('setup.py', 'wt') as f:
+ f.write(SETUP_PY)
+
+ # name/__init__.py
+ with open('name/__init__.py', 'wb') as f:
+ f.write(NS_INIT.encode('Latin-1'))
+
+ # name/space/__init__.py
+ with open('name/space/__init__.py', 'wt') as f:
+ f.write('#empty\n')
+
+ # name/space/tests/__init__.py
+ with open('name/space/tests/__init__.py', 'wt') as f:
+ f.write(TEST_PY)
+
+
+@pytest.fixture
+def quiet_log():
+ # Running some of the other tests will automatically
+ # change the log level to info, messing up our output.
+ log.set_verbosity(0)
+
+
+@pytest.mark.usefixtures('sample_test', 'quiet_log')
+def test_test(capfd):
+ params = dict(
+ name='foo',
+ packages=['name', 'name.space', 'name.space.tests'],
+ namespace_packages=['name'],
+ test_suite='name.space.tests.test_suite',
+ use_2to3=True,
+ )
+ dist = Distribution(params)
+ dist.script_name = 'setup.py'
+ cmd = test(dist)
+ cmd.ensure_finalized()
+ # The test runner calls sys.exit
+ with contexts.suppress_exceptions(SystemExit):
+ cmd.run()
+ out, err = capfd.readouterr()
+ assert out == 'Foo\n'
+
+
+@pytest.mark.xfail(
+ sys.version_info < (2, 7),
+ reason="No discover support for unittest on Python 2.6",
+)
+@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
+def test_tests_are_run_once(capfd):
+ params = dict(
+ name='foo',
+ packages=['dummy'],
+ )
+ with open('setup.py', 'wt') as f:
+ f.write('from setuptools import setup; setup(\n')
+ for k, v in sorted(params.items()):
+ f.write(' %s=%r,\n' % (k, v))
+ f.write(')\n')
+ os.makedirs('dummy')
+ with open('dummy/__init__.py', 'wt'):
+ pass
+ with open('dummy/test_dummy.py', 'wt') as f:
+ f.write(DALS(
+ """
+ from __future__ import print_function
+ import unittest
+ class TestTest(unittest.TestCase):
+ def test_test(self):
+ print('Foo')
+ """))
+ dist = Distribution(params)
+ dist.script_name = 'setup.py'
+ cmd = test(dist)
+ cmd.ensure_finalized()
+ # The test runner calls sys.exit
+ with contexts.suppress_exceptions(SystemExit):
+ cmd.run()
+ out, err = capfd.readouterr()
+ assert out == 'Foo\n'
diff --git a/setuptools/tests/test_unicode_utils.py b/setuptools/tests/test_unicode_utils.py
new file mode 100644
index 0000000..a24a9bd
--- /dev/null
+++ b/setuptools/tests/test_unicode_utils.py
@@ -0,0 +1,10 @@
+from setuptools import unicode_utils
+
+
+def test_filesys_decode_fs_encoding_is_None(monkeypatch):
+ """
+ Test filesys_decode does not raise TypeError when
+ getfilesystemencoding returns None.
+ """
+ monkeypatch.setattr('sys.getfilesystemencoding', lambda: None)
+ unicode_utils.filesys_decode(b'test')
diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py
new file mode 100644
index 0000000..a26e32a
--- /dev/null
+++ b/setuptools/tests/test_upload_docs.py
@@ -0,0 +1,71 @@
+import os
+import zipfile
+import contextlib
+
+import pytest
+
+from setuptools.command.upload_docs import upload_docs
+from setuptools.dist import Distribution
+
+from .textwrap import DALS
+from . import contexts
+
+SETUP_PY = DALS(
+ """
+ from setuptools import setup
+
+ setup(name='foo')
+ """)
+
+
+@pytest.fixture
+def sample_project(tmpdir_cwd):
+ # setup.py
+ with open('setup.py', 'wt') as f:
+ f.write(SETUP_PY)
+
+ os.mkdir('build')
+
+ # A test document.
+ with open('build/index.html', 'w') as f:
+ f.write("Hello world.")
+
+ # An empty folder.
+ os.mkdir('build/empty')
+
+
+@pytest.mark.usefixtures('sample_project')
+@pytest.mark.usefixtures('user_override')
+class TestUploadDocsTest:
+ def test_create_zipfile(self):
+ """
+ Ensure zipfile creation handles common cases, including a folder
+ containing an empty folder.
+ """
+
+ dist = Distribution()
+
+ cmd = upload_docs(dist)
+ cmd.target_dir = cmd.upload_dir = 'build'
+ with contexts.tempdir() as tmp_dir:
+ tmp_file = os.path.join(tmp_dir, 'foo.zip')
+ zip_file = cmd.create_zipfile(tmp_file)
+
+ assert zipfile.is_zipfile(tmp_file)
+
+ with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
+ assert zip_file.namelist() == ['index.html']
+
+ def test_build_multipart(self):
+ data = dict(
+ a="foo",
+ b="bar",
+ file=('file.txt', b'content'),
+ )
+ body, content_type = upload_docs._build_multipart(data)
+ assert 'form-data' in content_type
+ assert "b'" not in content_type
+ assert 'b"' not in content_type
+ assert isinstance(body, bytes)
+ assert b'foo' in body
+ assert b'content' in body
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
new file mode 100644
index 0000000..b66a311
--- /dev/null
+++ b/setuptools/tests/test_virtualenv.py
@@ -0,0 +1,139 @@
+import glob
+import os
+import sys
+
+import pytest
+from pytest import yield_fixture
+from pytest_fixture_config import yield_requires_config
+
+import pytest_virtualenv
+
+from .textwrap import DALS
+from .test_easy_install import make_nspkg_sdist
+
+
+@pytest.fixture(autouse=True)
+def pytest_virtualenv_works(virtualenv):
+ """
+ pytest_virtualenv may not work. If it doesn't, skip these
+ tests. See #1284.
+ """
+ venv_prefix = virtualenv.run(
+ 'python -c "import sys; print(sys.prefix)"',
+ capture=True,
+ ).strip()
+ if venv_prefix == sys.prefix:
+ pytest.skip("virtualenv is broken (see pypa/setuptools#1284)")
+
+
+@yield_requires_config(pytest_virtualenv.CONFIG, ['virtualenv_executable'])
+@yield_fixture(scope='function')
+def bare_virtualenv():
+ """ Bare virtualenv (no pip/setuptools/wheel).
+ """
+ with pytest_virtualenv.VirtualEnv(args=(
+ '--no-wheel',
+ '--no-pip',
+ '--no-setuptools',
+ )) as venv:
+ yield venv
+
+
+SOURCE_DIR = os.path.join(os.path.dirname(__file__), '../..')
+
+
+def test_clean_env_install(bare_virtualenv):
+ """
+ Check setuptools can be installed in a clean environment.
+ """
+ bare_virtualenv.run(' && '.join((
+ 'cd {source}',
+ 'python setup.py install',
+ )).format(source=SOURCE_DIR))
+
+
+def test_pip_upgrade_from_source(virtualenv):
+ """
+ Check pip can upgrade setuptools from source.
+ """
+ dist_dir = virtualenv.workspace
+ if sys.version_info < (2, 7):
+ # Python 2.6 support was dropped in wheel 0.30.0.
+ virtualenv.run('pip install -U "wheel<0.30.0"')
+ # Generate source distribution / wheel.
+ virtualenv.run(' && '.join((
+ 'cd {source}',
+ 'python setup.py -q sdist -d {dist}',
+ 'python setup.py -q bdist_wheel -d {dist}',
+ )).format(source=SOURCE_DIR, dist=dist_dir))
+ sdist = glob.glob(os.path.join(dist_dir, '*.zip'))[0]
+ wheel = glob.glob(os.path.join(dist_dir, '*.whl'))[0]
+ # Then update from wheel.
+ virtualenv.run('pip install ' + wheel)
+ # And finally try to upgrade from source.
+ virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist)
+
+
+def test_test_command_install_requirements(bare_virtualenv, tmpdir):
+ """
+ Check the test command will install all required dependencies.
+ """
+ bare_virtualenv.run(' && '.join((
+ 'cd {source}',
+ 'python setup.py develop',
+ )).format(source=SOURCE_DIR))
+
+ def sdist(distname, version):
+ dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version))
+ make_nspkg_sdist(str(dist_path), distname, version)
+ return dist_path
+ dependency_links = [
+ str(dist_path)
+ for dist_path in (
+ sdist('foobar', '2.4'),
+ sdist('bits', '4.2'),
+ sdist('bobs', '6.0'),
+ sdist('pieces', '0.6'),
+ )
+ ]
+ with tmpdir.join('setup.py').open('w') as fp:
+ fp.write(DALS(
+ '''
+ from setuptools import setup
+
+ setup(
+ dependency_links={dependency_links!r},
+ install_requires=[
+ 'barbazquux1; sys_platform in ""',
+ 'foobar==2.4',
+ ],
+ setup_requires='bits==4.2',
+ tests_require="""
+ bobs==6.0
+ """,
+ extras_require={{
+ 'test': ['barbazquux2'],
+ ':"" in sys_platform': 'pieces==0.6',
+ ':python_version > "1"': """
+ pieces
+ foobar
+ """,
+ }}
+ )
+ '''.format(dependency_links=dependency_links)))
+ with tmpdir.join('test.py').open('w') as fp:
+ fp.write(DALS(
+ '''
+ import foobar
+ import bits
+ import bobs
+ import pieces
+
+ open('success', 'w').close()
+ '''))
+ # Run test command for test package.
+ bare_virtualenv.run(' && '.join((
+ 'cd {tmpdir}',
+ 'python setup.py test -s test',
+ )).format(tmpdir=tmpdir))
+ assert tmpdir.join('success').check()
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
new file mode 100644
index 0000000..150ac4c
--- /dev/null
+++ b/setuptools/tests/test_wheel.py
@@ -0,0 +1,508 @@
+# -*- coding: utf-8 -*-
+
+"""wheel tests
+"""
+
+from distutils.sysconfig import get_config_var
+from distutils.util import get_platform
+import contextlib
+import glob
+import inspect
+import os
+import subprocess
+import sys
+
+import pytest
+
+from pkg_resources import Distribution, PathMetadata, PY_MAJOR
+from setuptools.wheel import Wheel
+
+from .contexts import tempdir
+from .files import build_files
+from .textwrap import DALS
+
+
+WHEEL_INFO_TESTS = (
+ ('invalid.whl', ValueError),
+ ('simplewheel-2.0-1-py2.py3-none-any.whl', {
+ 'project_name': 'simplewheel',
+ 'version': '2.0',
+ 'build': '1',
+ 'py_version': 'py2.py3',
+ 'abi': 'none',
+ 'platform': 'any',
+ }),
+ ('simple.dist-0.1-py2.py3-none-any.whl', {
+ 'project_name': 'simple.dist',
+ 'version': '0.1',
+ 'build': None,
+ 'py_version': 'py2.py3',
+ 'abi': 'none',
+ 'platform': 'any',
+ }),
+ ('example_pkg_a-1-py3-none-any.whl', {
+ 'project_name': 'example_pkg_a',
+ 'version': '1',
+ 'build': None,
+ 'py_version': 'py3',
+ 'abi': 'none',
+ 'platform': 'any',
+ }),
+ ('PyQt5-5.9-5.9.1-cp35.cp36.cp37-abi3-manylinux1_x86_64.whl', {
+ 'project_name': 'PyQt5',
+ 'version': '5.9',
+ 'build': '5.9.1',
+ 'py_version': 'cp35.cp36.cp37',
+ 'abi': 'abi3',
+ 'platform': 'manylinux1_x86_64',
+ }),
+)
+
+@pytest.mark.parametrize(
+ ('filename', 'info'), WHEEL_INFO_TESTS,
+ ids=[t[0] for t in WHEEL_INFO_TESTS]
+)
+def test_wheel_info(filename, info):
+ if inspect.isclass(info):
+ with pytest.raises(info):
+ Wheel(filename)
+ return
+ w = Wheel(filename)
+ assert {k: getattr(w, k) for k in info.keys()} == info
+
+
+@contextlib.contextmanager
+def build_wheel(extra_file_defs=None, **kwargs):
+ file_defs = {
+ 'setup.py': (DALS(
+ '''
+ # -*- coding: utf-8 -*-
+ from setuptools import setup
+ import setuptools
+ setup(**%r)
+ '''
+ ) % kwargs).encode('utf-8'),
+ }
+ if extra_file_defs:
+ file_defs.update(extra_file_defs)
+ with tempdir() as source_dir:
+ build_files(file_defs, source_dir)
+ subprocess.check_call((sys.executable, 'setup.py',
+ '-q', 'bdist_wheel'), cwd=source_dir)
+ yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
+
+
+def tree_set(root):
+ contents = set()
+ for dirpath, dirnames, filenames in os.walk(root):
+ for filename in filenames:
+ contents.add(os.path.join(os.path.relpath(dirpath, root),
+ filename))
+ return contents
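+
+# illustrative: for a root holding a.txt and sub/b.txt, tree_set yields
+# {os.path.join('.', 'a.txt'), os.path.join('sub', 'b.txt')}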
+
+
+def flatten_tree(tree):
+ """Flatten nested dicts and lists into a full list of paths"""
+ output = set()
+ for node, contents in tree.items():
+ if isinstance(contents, dict):
+ contents = flatten_tree(contents)
+
+ for elem in contents:
+ if isinstance(elem, dict):
+ output |= {os.path.join(node, val)
+ for val in flatten_tree(elem)}
+ else:
+ output.add(os.path.join(node, elem))
+ return output
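+
+# illustrative: flatten_tree({'a': ['b', {'c': ['d']}]}) == {'a/b', 'a/c/d'}
+# (path separators follow os.path.join on the host platform)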
+
+
+def format_install_tree(tree):
+ return {x.format(
+ py_version=PY_MAJOR,
+ platform=get_platform(),
+ shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
+ for x in tree}
+
+
+def _check_wheel_install(filename, install_dir, install_tree_includes,
+ project_name, version, requires_txt):
+ w = Wheel(filename)
+ egg_path = os.path.join(install_dir, w.egg_name())
+ w.install_as_egg(egg_path)
+ if install_tree_includes is not None:
+ install_tree = format_install_tree(install_tree_includes)
+ exp = tree_set(install_dir)
+ assert install_tree.issubset(exp), (install_tree - exp)
+
+ metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
+ dist = Distribution.from_filename(egg_path, metadata=metadata)
+ assert dist.project_name == project_name
+ assert dist.version == version
+ if requires_txt is None:
+ assert not dist.has_metadata('requires.txt')
+ else:
+ assert requires_txt == dist.get_metadata('requires.txt').lstrip()
+
+
+class Record(object):
+
+ def __init__(self, id, **kwargs):
+ self._id = id
+ self._fields = kwargs
+
+ def __repr__(self):
+ return '%s(**%r)' % (self._id, self._fields)
+
+
+WHEEL_INSTALL_TESTS = (
+
+ dict(
+ id='basic',
+ file_defs={
+ 'foo': {
+ '__init__.py': ''
+ }
+ },
+ setup_kwargs=dict(
+ packages=['foo'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt'
+ ],
+ 'foo': ['__init__.py']
+ }
+ }),
+ ),
+
+ dict(
+ id='utf-8',
+ setup_kwargs=dict(
+ description='Description accentuée',
+ )
+ ),
+
+ dict(
+ id='data',
+ file_defs={
+ 'data.txt': DALS(
+ '''
+ Some data...
+ '''
+ ),
+ },
+ setup_kwargs=dict(
+ data_files=[('data_dir', ['data.txt'])],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt'
+ ],
+ 'data_dir': [
+ 'data.txt'
+ ]
+ }
+ }),
+ ),
+
+ dict(
+ id='extension',
+ file_defs={
+ 'extension.c': DALS(
+ '''
+ #include "Python.h"
+
+ #if PY_MAJOR_VERSION >= 3
+
+ static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "extension",
+ NULL,
+ 0,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ NULL
+ };
+
+ #define INITERROR return NULL
+
+ PyMODINIT_FUNC PyInit_extension(void)
+
+ #else
+
+ #define INITERROR return
+
+ void initextension(void)
+
+ #endif
+ {
+ #if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+ #else
+ PyObject *module = Py_InitModule("extension", NULL);
+ #endif
+ if (module == NULL)
+ INITERROR;
+ #if PY_MAJOR_VERSION >= 3
+ return module;
+ #endif
+ }
+ '''
+ ),
+ },
+ setup_kwargs=dict(
+ ext_modules=[
+ Record('setuptools.Extension',
+ name='extension',
+ sources=['extension.c'])
+ ],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}-{platform}.egg': [
+ 'extension{shlib_ext}',
+ {'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt',
+ ]},
+ ]
+ }),
+ ),
+
+ dict(
+ id='header',
+ file_defs={
+ 'header.h': DALS(
+ '''
+ '''
+ ),
+ },
+ setup_kwargs=dict(
+ headers=['header.h'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': [
+ 'header.h',
+ {'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt',
+ ]},
+ ]
+ }),
+ ),
+
+ dict(
+ id='script',
+ file_defs={
+ 'script.py': DALS(
+ '''
+ #/usr/bin/python
+ print('hello world!')
+ '''
+ ),
+ 'script.sh': DALS(
+ '''
+ #/bin/sh
+ echo 'hello world!'
+ '''
+ ),
+ },
+ setup_kwargs=dict(
+ scripts=['script.py', 'script.sh'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt',
+ {'scripts': [
+ 'script.py',
+ 'script.sh'
+ ]}
+
+ ]
+ }
+ })
+ ),
+
+ dict(
+ id='requires1',
+ install_requires='foobar==2.0',
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'requires.txt',
+ 'top_level.txt',
+ ]
+ }
+ }),
+ requires_txt=DALS(
+ '''
+ foobar==2.0
+ '''
+ ),
+ ),
+
+ dict(
+ id='requires2',
+ install_requires='''
+ bar
+ foo<=2.0; %r in sys_platform
+ ''' % sys.platform,
+ requires_txt=DALS(
+ '''
+ bar
+ foo<=2.0
+ '''
+ ),
+ ),
+
+ dict(
+ id='requires3',
+ install_requires='''
+ bar; %r != sys_platform
+ ''' % sys.platform,
+ ),
+
+ dict(
+ id='requires4',
+ install_requires='''
+ foo
+ ''',
+ extras_require={
+ 'extra': 'foobar>3',
+ },
+ requires_txt=DALS(
+ '''
+ foo
+
+ [extra]
+ foobar>3
+ '''
+ ),
+ ),
+
+ dict(
+ id='requires5',
+ extras_require={
+ 'extra': 'foobar; %r != sys_platform' % sys.platform,
+ },
+ requires_txt=DALS(
+ '''
+ [extra]
+ '''
+ ),
+ ),
+
+ dict(
+ id='namespace_package',
+ file_defs={
+ 'foo': {
+ 'bar': {
+ '__init__.py': ''
+ },
+ },
+ },
+ setup_kwargs=dict(
+ namespace_packages=['foo'],
+ packages=['foo.bar'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': [
+ 'foo-1.0-py{py_version}-nspkg.pth',
+ {'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'namespace_packages.txt',
+ 'top_level.txt',
+ ]},
+ {'foo': [
+ '__init__.py',
+ {'bar': ['__init__.py']},
+ ]},
+ ]
+ }),
+ ),
+
+ dict(
+ id='data_in_package',
+ file_defs={
+ 'foo': {
+ '__init__.py': '',
+ 'data_dir': {
+ 'data.txt': DALS(
+ '''
+ Some data...
+ '''
+ ),
+ }
+ }
+ },
+ setup_kwargs=dict(
+ packages=['foo'],
+ data_files=[('foo/data_dir', ['foo/data_dir/data.txt'])],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': {
+ 'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'top_level.txt',
+ ],
+ 'foo': [
+ '__init__.py',
+ {'data_dir': [
+ 'data.txt',
+ ]}
+ ]
+ }
+ }),
+ ),
+
+)
+
+@pytest.mark.parametrize(
+ 'params', WHEEL_INSTALL_TESTS,
+ ids=list(params['id'] for params in WHEEL_INSTALL_TESTS),
+)
+def test_wheel_install(params):
+ project_name = params.get('name', 'foo')
+ version = params.get('version', '1.0')
+ install_requires = params.get('install_requires', [])
+ extras_require = params.get('extras_require', {})
+ requires_txt = params.get('requires_txt', None)
+ install_tree = params.get('install_tree')
+ file_defs = params.get('file_defs', {})
+ setup_kwargs = params.get('setup_kwargs', {})
+ with build_wheel(
+ name=project_name,
+ version=version,
+ install_requires=install_requires,
+ extras_require=extras_require,
+ extra_file_defs=file_defs,
+ **setup_kwargs
+ ) as filename, tempdir() as install_dir:
+ _check_wheel_install(filename, install_dir,
+ install_tree, project_name,
+ version, requires_txt)
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
new file mode 100644
index 0000000..d2871c0
--- /dev/null
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -0,0 +1,181 @@
+"""
+Python Script Wrapper for Windows
+=================================
+
+setuptools includes wrappers for Python scripts that allow them to be
+executed like regular Windows programs. There are 2 wrappers, one
+for command-line programs, cli.exe, and one for graphical programs,
+gui.exe. These programs are almost identical, function pretty much
+the same way, and are generated from the same source file. The
+wrapper programs are used by copying them to the directory containing
+the script they are to wrap, giving them the same name as that script.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import textwrap
+import subprocess
+
+import pytest
+
+from setuptools.command.easy_install import nt_quote_arg
+import pkg_resources
+
+pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
+
+
+class WrapperTester:
+ @classmethod
+ def prep_script(cls, template):
+ python_exe = nt_quote_arg(sys.executable)
+ return template % locals()
+
+ @classmethod
+ def create_script(cls, tmpdir):
+ """
+ Create a simple script, foo-script.py
+
+ Note that the script starts with a Unix-style '#!' line saying which
+ Python executable to run. The wrapper will use this line to find the
+ correct Python executable.
+ """
+
+ script = cls.prep_script(cls.script_tmpl)
+
+ with (tmpdir / cls.script_name).open('w') as f:
+ f.write(script)
+
+ # also copy cli.exe to the sample directory
+ with (tmpdir / cls.wrapper_name).open('wb') as f:
+ w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
+ f.write(w)
+
+
+class TestCLI(WrapperTester):
+ script_name = 'foo-script.py'
+ wrapper_source = 'cli-32.exe'
+ wrapper_name = 'foo.exe'
+ script_tmpl = textwrap.dedent("""
+ #!%(python_exe)s
+ import sys
+ input = repr(sys.stdin.read())
+ print(sys.argv[0][-14:])
+ print(sys.argv[1:])
+ print(input)
+ if __debug__:
+ print('non-optimized')
+ """).lstrip()
+
+ def test_basic(self, tmpdir):
+ """
+ When the copy of cli.exe, foo.exe in this example, runs, it examines
+ the path name it was run with and computes a Python script path name
+ by removing the '.exe' suffix and adding the '-script.py' suffix. (For
+ GUI programs, the suffix '-script.pyw' is added.) This is why we
+ named our script the way we did. Now we can run our script by running
+ the wrapper:
+
+ This example was a little pathological in that it exercised Windows
+ (MS C runtime) quoting rules:
+
+ - Strings containing spaces are surrounded by double quotes.
+
+ - Double quotes in strings need to be escaped by preceding them with
+ back slashes.
+
+ - One or more backslashes preceding double quotes need to be escaped
+ by preceding each of them with back slashes.
+ """
+ self.create_script(tmpdir)
+ cmd = [
+ str(tmpdir / 'foo.exe'),
+ 'arg1',
+ 'arg 2',
+ 'arg "2\\"',
+ 'arg 4\\',
+ 'arg5 a\\\\b',
+ ]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
+ actual = stdout.decode('ascii').replace('\r\n', '\n')
+ expected = textwrap.dedent(r"""
+ \foo-script.py
+ ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
+ 'hello\nworld\n'
+ non-optimized
+ """).lstrip()
+ assert actual == expected
+
+ def test_with_options(self, tmpdir):
+ """
+ Specifying Python Command-line Options
+ --------------------------------------
+
+ You can specify a single argument on the '#!' line. This can be used
+ to specify Python options like -O, to run in optimized mode, or -i,
+ to start the interactive interpreter. You can combine multiple
+ options as usual. For example, to run in optimized mode and
+ enter the interpreter after running the script, you could use -Oi:
+ """
+ self.create_script(tmpdir)
+ tmpl = textwrap.dedent("""
+ #!%(python_exe)s -Oi
+ import sys
+ input = repr(sys.stdin.read())
+ print(sys.argv[0][-14:])
+ print(sys.argv[1:])
+ print(input)
+ if __debug__:
+ print('non-optimized')
+ sys.ps1 = '---'
+ """).lstrip()
+ with (tmpdir / 'foo-script.py').open('w') as f:
+ f.write(self.prep_script(tmpl))
+ cmd = [str(tmpdir / 'foo.exe')]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdout, stderr = proc.communicate()
+ actual = stdout.decode('ascii').replace('\r\n', '\n')
+ expected = textwrap.dedent(r"""
+ \foo-script.py
+ []
+ ''
+ ---
+ """).lstrip()
+ assert actual == expected
+
+
+class TestGUI(WrapperTester):
+ """
+ Testing the GUI Version
+ -----------------------
+ """
+ script_name = 'bar-script.pyw'
+ wrapper_source = 'gui-32.exe'
+ wrapper_name = 'bar.exe'
+
+ script_tmpl = textwrap.dedent("""
+ #!%(python_exe)s
+ import sys
+ f = open(sys.argv[1], 'wb')
+ bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
+ f.close()
+ """).strip()
+
+ def test_basic(self, tmpdir):
+ """Test the GUI version with the simple scipt, bar-script.py"""
+ self.create_script(tmpdir)
+
+ cmd = [
+ str(tmpdir / 'bar.exe'),
+ str(tmpdir / 'test_output.txt'),
+ 'Test Argument',
+ ]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdout, stderr = proc.communicate()
+ assert not stdout
+ assert not stderr
+ with (tmpdir / 'test_output.txt').open('rb') as f_out:
+ actual = f_out.read().decode('ascii')
+ assert actual == repr('Test Argument')
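
For reference, the '.exe' -> '-script.py' name computation these tests rely on
can be modelled in a few lines of Python. This is only an illustrative sketch
of the wrapper's behaviour, not the shipped C implementation; the helper name
and the sample paths are hypothetical.

    import os

    def script_for(wrapper_path, gui=False):
        # foo.exe -> foo-script.py; GUI wrappers use the '-script.pyw' suffix
        base, _ext = os.path.splitext(wrapper_path)
        return base + ('-script.pyw' if gui else '-script.py')

    assert script_for(r'C:\tools\foo.exe') == r'C:\tools\foo-script.py'
    assert script_for(r'C:\tools\bar.exe', gui=True) == r'C:\tools\bar-script.pyw'
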
diff --git a/setuptools/tests/text.py b/setuptools/tests/text.py
new file mode 100644
index 0000000..ad2c624
--- /dev/null
+++ b/setuptools/tests/text.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import unicode_literals
+
+
+class Filenames:
+ unicode = 'smörbröd.py'
+ latin_1 = unicode.encode('latin-1')
+ utf_8 = unicode.encode('utf-8')
diff --git a/setuptools/tests/textwrap.py b/setuptools/tests/textwrap.py
new file mode 100644
index 0000000..5cd9e5b
--- /dev/null
+++ b/setuptools/tests/textwrap.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+import textwrap
+
+
+def DALS(s):
+ "dedent and left-strip"
+ return textwrap.dedent(s).lstrip()
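
A quick illustration of DALS (the sample string is ours): it dedents a
triple-quoted block and strips the leading newline, which keeps expected-output
literals in tests readable.

    from setuptools.tests.textwrap import DALS

    expected = DALS("""
        [metadata]
        name = foo
        """)
    assert expected == '[metadata]\nname = foo\n'
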
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
new file mode 100644
index 0000000..7c63efd
--- /dev/null
+++ b/setuptools/unicode_utils.py
@@ -0,0 +1,44 @@
+import unicodedata
+import sys
+
+from setuptools.extern import six
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+ if isinstance(path, six.text_type):
+ return unicodedata.normalize('NFD', path)
+ try:
+ path = path.decode('utf-8')
+ path = unicodedata.normalize('NFD', path)
+ path = path.encode('utf-8')
+ except UnicodeError:
+ pass # Not UTF-8
+ return path
+
+
+def filesys_decode(path):
+ """
+    Ensure that the given path is decoded; returns
+    None when no expected encoding works.
+ """
+
+ if isinstance(path, six.text_type):
+ return path
+
+ fs_enc = sys.getfilesystemencoding() or 'utf-8'
+ candidates = fs_enc, 'utf-8'
+
+ for enc in candidates:
+ try:
+ return path.decode(enc)
+ except UnicodeDecodeError:
+ continue
+
+
+def try_encode(string, enc):
+ "turn unicode encoding into a functional routine"
+ try:
+ return string.encode(enc)
+ except UnicodeEncodeError:
+ return None
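
A short sketch of how these helpers behave, assuming a UTF-8 filesystem
encoding; the sample filename matches the one in setuptools/tests/text.py
above.

    from setuptools import unicode_utils

    name = u'smörbröd.py'

    # NFD splits each 'ö' into 'o' plus a combining diaeresis (the form
    # HFS Plus stores), so the decomposed string is two characters longer.
    assert len(unicode_utils.decompose(name)) == len(name) + 2

    # filesys_decode tries the filesystem encoding, then UTF-8, and falls
    # through to an implicit None when neither can decode the bytes.
    assert unicode_utils.filesys_decode(name.encode('utf-8')) == name
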
diff --git a/setuptools/version.py b/setuptools/version.py
new file mode 100644
index 0000000..95e1869
--- /dev/null
+++ b/setuptools/version.py
@@ -0,0 +1,6 @@
+import pkg_resources
+
+try:
+ __version__ = pkg_resources.get_distribution('setuptools').version
+except Exception:
+ __version__ = 'unknown'
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
new file mode 100644
index 0000000..37dfa53
--- /dev/null
+++ b/setuptools/wheel.py
@@ -0,0 +1,163 @@
+'''Wheels support.'''
+
+from distutils.util import get_platform
+import email.parser
+import itertools
+import os
+import re
+import zipfile
+
+from pkg_resources import Distribution, PathMetadata, parse_version
+from setuptools.extern.six import PY3
+from setuptools import Distribution as SetuptoolsDistribution
+from setuptools import pep425tags
+from setuptools.command.egg_info import write_requirements
+
+
+WHEEL_NAME = re.compile(
+ r"""^(?P<project_name>.+?)-(?P<version>\d.*?)
+ ((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?)
+ )\.whl$""",
+    re.VERBOSE).match
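+# For example, WHEEL_NAME('pip-9.0.1-py2.py3-none-any.whl') captures
+# project_name='pip', version='9.0.1', build=None,
+# py_version='py2.py3', abi='none', platform='any'.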
+
+NAMESPACE_PACKAGE_INIT = '''\
+try:
+ __import__('pkg_resources').declare_namespace(__name__)
+except ImportError:
+ __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+'''
+
+
+def unpack(src_dir, dst_dir):
+ '''Move everything under `src_dir` to `dst_dir`, and delete the former.'''
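+    # Example (hypothetical paths): unpack('/tmp/foo-1.0.data/purelib', egg)
+    # merges that tree into `egg` and removes the emptied source tree.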
+ for dirpath, dirnames, filenames in os.walk(src_dir):
+ subdir = os.path.relpath(dirpath, src_dir)
+ for f in filenames:
+ src = os.path.join(dirpath, f)
+ dst = os.path.join(dst_dir, subdir, f)
+ os.renames(src, dst)
+ for n, d in reversed(list(enumerate(dirnames))):
+ src = os.path.join(dirpath, d)
+ dst = os.path.join(dst_dir, subdir, d)
+ if not os.path.exists(dst):
+ # Directory does not exist in destination,
+ # rename it and prune it from os.walk list.
+ os.renames(src, dst)
+ del dirnames[n]
+ # Cleanup.
+ for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True):
+ assert not filenames
+ os.rmdir(dirpath)
+
+
+class Wheel(object):
+
+ def __init__(self, filename):
+ match = WHEEL_NAME(os.path.basename(filename))
+ if match is None:
+ raise ValueError('invalid wheel name: %r' % filename)
+ self.filename = filename
+ for k, v in match.groupdict().items():
+ setattr(self, k, v)
+
+ def tags(self):
+ '''List tags (py_version, abi, platform) supported by this wheel.'''
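+        # A 'py2.py3-none-any' wheel, for instance, expands to
+        # ('py2', 'none', 'any') and ('py3', 'none', 'any').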
+ return itertools.product(self.py_version.split('.'),
+ self.abi.split('.'),
+ self.platform.split('.'))
+
+    def is_compatible(self):
+        '''Is the wheel compatible with the current platform?'''
+        supported_tags = pep425tags.get_supported()
+        return any(t in supported_tags for t in self.tags())
+
+ def egg_name(self):
+ return Distribution(
+ project_name=self.project_name, version=self.version,
+ platform=(None if self.platform == 'any' else get_platform()),
+ ).egg_name() + '.egg'
+
+ def install_as_egg(self, destination_eggdir):
+ '''Install wheel as an egg directory.'''
+ with zipfile.ZipFile(self.filename) as zf:
+ dist_basename = '%s-%s' % (self.project_name, self.version)
+ dist_info = '%s.dist-info' % dist_basename
+ dist_data = '%s.data' % dist_basename
+ def get_metadata(name):
+ with zf.open('%s/%s' % (dist_info, name)) as fp:
+ value = fp.read().decode('utf-8') if PY3 else fp.read()
+ return email.parser.Parser().parsestr(value)
+ wheel_metadata = get_metadata('WHEEL')
+ dist_metadata = get_metadata('METADATA')
+ # Check wheel format version is supported.
+ wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
+ if not parse_version('1.0') <= wheel_version < parse_version('2.0dev0'):
+ raise ValueError('unsupported wheel format version: %s' % wheel_version)
+ # Extract to target directory.
+ os.mkdir(destination_eggdir)
+ zf.extractall(destination_eggdir)
+ # Convert metadata.
+ dist_info = os.path.join(destination_eggdir, dist_info)
+ dist = Distribution.from_location(
+ destination_eggdir, dist_info,
+ metadata=PathMetadata(destination_eggdir, dist_info)
+ )
+ # Note: we need to evaluate and strip markers now,
+ # as we can't easily convert back from the syntax:
+ # foobar; "linux" in sys_platform and extra == 'test'
+ def raw_req(req):
+ req.marker = None
+ return str(req)
+            install_requires = sorted(map(raw_req, dist.requires()))
+            extras_require = {
+                extra: sorted(
+                    req
+                    for req in map(raw_req, dist.requires((extra,)))
+                    if req not in install_requires
+                )
+                for extra in dist.extras
+            }
+ egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
+ os.rename(dist_info, egg_info)
+ os.rename(os.path.join(egg_info, 'METADATA'),
+ os.path.join(egg_info, 'PKG-INFO'))
+ setup_dist = SetuptoolsDistribution(attrs=dict(
+ install_requires=install_requires,
+ extras_require=extras_require,
+ ))
+ write_requirements(setup_dist.get_command_obj('egg_info'),
+ None, os.path.join(egg_info, 'requires.txt'))
+ # Move data entries to their correct location.
+ dist_data = os.path.join(destination_eggdir, dist_data)
+ dist_data_scripts = os.path.join(dist_data, 'scripts')
+ if os.path.exists(dist_data_scripts):
+ egg_info_scripts = os.path.join(destination_eggdir,
+ 'EGG-INFO', 'scripts')
+ os.mkdir(egg_info_scripts)
+ for entry in os.listdir(dist_data_scripts):
+ # Remove bytecode, as it's not properly handled
+ # during easy_install scripts install phase.
+ if entry.endswith('.pyc'):
+ os.unlink(os.path.join(dist_data_scripts, entry))
+ else:
+ os.rename(os.path.join(dist_data_scripts, entry),
+ os.path.join(egg_info_scripts, entry))
+ os.rmdir(dist_data_scripts)
+ for subdir in filter(os.path.exists, (
+ os.path.join(dist_data, d)
+ for d in ('data', 'headers', 'purelib', 'platlib')
+ )):
+ unpack(subdir, destination_eggdir)
+ if os.path.exists(dist_data):
+ os.rmdir(dist_data)
+ # Fix namespace packages.
+ namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
+ if os.path.exists(namespace_packages):
+ with open(namespace_packages) as fp:
+ namespace_packages = fp.read().split()
+ for mod in namespace_packages:
+ mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
+ mod_init = os.path.join(mod_dir, '__init__.py')
+ if os.path.exists(mod_dir) and not os.path.exists(mod_init):
+ with open(mod_init, 'w') as fp:
+ fp.write(NAMESPACE_PACKAGE_INIT)
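
End to end, this is roughly how easy_install drives the class; the wheel
filename and the .eggs target directory below are hypothetical.

    import os
    from setuptools.wheel import Wheel

    wheel = Wheel('example_pkg-1.0-py2.py3-none-any.whl')
    if wheel.is_compatible():
        wheel.install_as_egg(os.path.join('.eggs', wheel.egg_name()))
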
diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py
new file mode 100644
index 0000000..cb977cf
--- /dev/null
+++ b/setuptools/windows_support.py
@@ -0,0 +1,29 @@
+import platform
+import ctypes
+
+
+def windows_only(func):
+ if platform.system() != 'Windows':
+ return lambda *args, **kwargs: None
+ return func
+
+
+@windows_only
+def hide_file(path):
+ """
+ Set the hidden attribute on a file or directory.
+
+ From http://stackoverflow.com/questions/19622133/
+
+ `path` must be text.
+ """
+ __import__('ctypes.wintypes')
+ SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
+ SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
+ SetFileAttributes.restype = ctypes.wintypes.BOOL
+
+ FILE_ATTRIBUTE_HIDDEN = 0x02
+
+ ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
+ if not ret:
+ raise ctypes.WinError()
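
A minimal usage sketch for hide_file; the path is hypothetical and, per the
docstring, must be text. On non-Windows platforms the windows_only decorator
turns the call into a no-op.

    from setuptools.windows_support import hide_file

    hide_file(u'C:\\projects\\demo\\.eggs')  # sets FILE_ATTRIBUTE_HIDDEN on Windows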