Diffstat (limited to 'jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools')
67 files changed, 0 insertions, 11984 deletions
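The deleted tree is the vendored copy of setuptools that had been checked in under the Jython TOSCA parser's Lib/site-packages. For orientation, the first file removed below (setuptools/__init__.py) re-exports the package's public API: setup() is bound to distutils.core.setup and find_packages() to PackageFinder.find. The snippet that follows is only a minimal usage sketch of that API; the project name and exclude patterns are hypothetical placeholders, not taken from this repository.

    # Hypothetical example of the API exposed by the deleted setuptools/__init__.py
    from setuptools import setup, find_packages

    setup(
        name="example-project",   # placeholder project name
        version="0.1.0",
        # find_packages() walks the source tree for directories containing
        # __init__.py, honouring shell-style include/exclude patterns.
        packages=find_packages(exclude=("tests", "tests.*")),
    )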
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/__init__.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/__init__.py deleted file mode 100644 index 8188f12..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/__init__.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" - -import os -import distutils.core -import distutils.filelist -from distutils.core import Command as _Command -from distutils.util import convert_path -from fnmatch import fnmatchcase - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched -from setuptools.depends import Require -from setuptools.compat import filterfalse - -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - -# If we run 2to3 on .py files, should we also convert docstrings? -# Default: yes; assume that we can detect doctests reliably -run_2to3_on_doctests = True -# Standard package names for fixer packages -lib2to3_fixer_packages = ['lib2to3.fixes'] - - -class PackageFinder(object): - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) - path; it will be converted to the appropriate local path syntax. - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - - The list of included packages is built up first and then any - explicitly excluded packages are removed from it. - """ - out = cls._find_packages_iter(convert_path(where)) - out = cls.require_parents(out) - includes = cls._build_filter(*include) - excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) - out = filter(includes, out) - out = filterfalse(excludes, out) - return list(out) - - @staticmethod - def require_parents(packages): - """ - Exclude any apparent package that apparently doesn't include its - parent. - - For example, exclude 'foo.bar' if 'foo' is not present. - """ - found = [] - for pkg in packages: - base, sep, child = pkg.rpartition('.') - if base and base not in found: - continue - found.append(pkg) - yield pkg - - @staticmethod - def _all_dirs(base_path): - """ - Return all dirs in base_path, relative to base_path - """ - for root, dirs, files in os.walk(base_path, followlinks=True): - for dir in dirs: - yield os.path.relpath(os.path.join(root, dir), base_path) - - @classmethod - def _find_packages_iter(cls, base_path): - dirs = cls._all_dirs(base_path) - suitable = filterfalse(lambda n: '.' 
in n, dirs) - return ( - path.replace(os.path.sep, '.') - for path in suitable - if cls._looks_like_package(os.path.join(base_path, path)) - ) - - @staticmethod - def _looks_like_package(path): - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches one of the patterns. - """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - -find_packages = PackageFinder.find - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). - """ - all_files = [] - for base, dirs, files in os.walk(dir, followlinks=True): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -distutils.filelist.findall = findall # fix findall bug in distutils. diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/archive_util.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/archive_util.py deleted file mode 100644 index b3c9fa5..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/archive_util.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile -import tarfile -import os -import shutil -import posixpath -import contextlib -from pkg_resources import ensure_directory, ContextualZipFile -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. 
- - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. - """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % filename) - - paths = { - filename: ('', extract_dir), - } - for base, dirs, files in os.walk(filename): - src, dst = paths[base] - for d in dirs: - paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) - for f in files: - target = os.path.join(dst, f) - target = progress_filter(src + f, target) - if not target: - # skip non-files - continue - ensure_directory(target) - f = os.path.join(base, f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - with ContextualZipFile(filename) as z: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - with open(target, 'wb') as f: - f.write(data) - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - with contextlib.closing(tarobj): - # don't do any chowning! - tarobj.chown = lambda *args: None - for member in tarobj: - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name.split('/'): - prelim_dst = os.path.join(extract_dir, *name.split('/')) - - # resolve any links and to extract the link targets as normal - # files - while member is not None and (member.islnk() or member.issym()): - linkpath = member.linkname - if member.issym(): - base = posixpath.dirname(member.name) - linkpath = posixpath.join(base, linkpath) - linkpath = posixpath.normpath(linkpath) - member = tarobj._getmember(linkpath) - - if member is not None and (member.isfile() or member.isdir()): - final_dst = progress_filter(name, prelim_dst) - if final_dst: - if final_dst.endswith(os.sep): - final_dst = final_dst[:-1] - try: - # XXX Ugh - tarobj._extract_member(member, final_dst) - except tarfile.ExtractError: - # chown/chmod/mkfifo/mknode/makedev failed - pass - return True - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py deleted file mode 100644 index f6dbc39..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', -] - -from distutils.command.bdist import bdist -import sys - -from setuptools.command import install_scripts - - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py deleted file mode 100644 index 452a924..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py +++ /dev/null @@ -1,78 +0,0 @@ -from distutils.errors import DistutilsOptionError - -from setuptools.command.setopt import edit_config, option_base, config_file - - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: - return repr(arg) - if arg.split() != [arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args) != 1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print("Command Aliases") - print("---------------") - for alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - - elif len(self.args) == 1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print("setup.py alias", 
format_alias(alias, aliases)) - return - else: - print("No alias definition found for %r" % alias) - return - else: - alias = self.args[0] - command = ' '.join(map(shquote, self.args[1:])) - - edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source + name + ' ' + command diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py deleted file mode 100644 index 87dce88..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,470 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -from distutils.errors import DistutilsSetupError -from distutils.dir_util import remove_tree, mkpath -from distutils import log -from types import CodeType -import sys -import os -import marshal -import textwrap - -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from setuptools.compat import basestring -from setuptools.extension import Library -from setuptools import Command - -try: - # Python 2.7 or >=3.2 - from sysconfig import get_path, get_python_version - - def _get_purelib(): - return get_path("purelib") -except ImportError: - from distutils.sysconfig import get_python_lib, get_python_version - - def _get_purelib(): - return get_python_lib(False) - - -def strip_module(filename): - if '.' 
in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - - -def write_stub(resource, pyfile): - _stub_template = textwrap.dedent(""" - def __bootstrap__(): - global __bootstrap__, __loader__, __file__ - import sys, pkg_resources, imp - __file__ = pkg_resources.resource_filename(__name__, %r) - __loader__ = None; del __bootstrap__, __loader__ - imp.load_dynamic(__name__,__file__) - __bootstrap__() - """).lstrip() - with open(pyfile, 'w') as f: - f.write(_stub_template % resource) - - -class bdist_egg(Command): - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - def finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename + '.egg') - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(_get_purelib())) - old, self.distribution.data_files = self.distribution.data_files, [] - - for item in old: - if isinstance(item, tuple) and len(item) == 2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized == site_packages or normalized.startswith( - site_packages + os.sep - ): - item = realpath[len(site_packages) + 1:], item[1] - # XXX else: raise ??? 
- self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - def get_outputs(self): - return [self.egg_output] - - def call_command(self, cmdname, **kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname, self.bdist_dir) - kw.setdefault('skip_build', self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - def run(self): - # Generate metadata first - self.run_command("egg_info") - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root - instcmd.root = None - if self.distribution.has_c_libraries() and not self.skip_build: - self.run_command('build_clib') - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p, ext_name) in enumerate(ext_outputs): - filename, ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename) + - '.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep, '/') - - if to_compile: - cmd.byte_compile(to_compile) - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root, 'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts', install_dir=script_dir, - no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root, 'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution, 'dist_files', []).append( - ('bdist_egg', get_python_version(), self.egg_output)) - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base, dirs, files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base, name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution, 'zip_safe', None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation', {}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. - - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' - - def copy_metadata_to(self, target_dir): - "Copy metadata (egg info) to the target_dir" - # normalize the path (so that a forward-slash in egg_info will - # match using startswith below) - norm_egg_info = os.path.normpath(self.egg_info) - prefix = os.path.join(norm_egg_info, '') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir: ''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base] + filename) - for filename in dirs: - paths[os.path.join(base, filename)] = (paths[base] + - filename + '/') - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext, Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir, filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - -def 
walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base, dirs, files = next(walker) - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base, dirs, files - for bdf in walker: - yield bdf - - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag, fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)): - return flag - if not can_scan(): - return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag, fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe) != flag: - os.unlink(fn) - elif safe is not None and bool(safe) == flag: - f = open(fn, 'wt') - f.write('\n') - f.close() - - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base, name) - if filename[:-1] in stubs: - return True # Extension module - pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') - module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] - if sys.version_info < (3, 3): - skip = 8 # skip magic & date - else: - skip = 12 # skip magic & date & file size - f = open(filename, 'rb') - f.read(skip) - code = marshal.load(f) - f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile' - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - return safe - - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: - yield name - for const in code.co_consts: - if isinstance(const, basestring): - yield const - elif isinstance(const, CodeType): - for name in iter_symbols(const): - yield name - - -def can_scan(): - if not sys.platform.startswith('java') and sys.platform != 'cli': - # CPython, PyPy, etc. - return True - log.warn("Unable to analyze compiled code on this platform.") - log.warn("Please ask the author to include a 'zip_safe'" - " setting (either True or False) in the package's setup.py") - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - - -def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, - mode='w'): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. 
Returns the name of the output zip file. - """ - import zipfile - - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir) + 1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in os.walk(base_dir): - visit(z, dirname, files) - z.close() - else: - for dirname, dirs, files in os.walk(base_dir): - visit(None, dirname, files) - return zip_filename diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py deleted file mode 100644 index 7073092..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,43 +0,0 @@ -import distutils.command.bdist_rpm as orig - - -class bdist_rpm(orig.bdist_rpm): - """ - Override the default bdist_rpm behavior to do the following: - - 1. Run egg_info to ensure the name and version are properly calculated. - 2. Always run 'install' using --single-version-externally-managed to - disable eggs in RPM distributions. - 3. Replace dash with underscore in the version numbers for better RPM - compatibility. - """ - - def run(self): - # ensure distro name is up-to-date - self.run_command('egg_info') - - orig.bdist_rpm.run(self) - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-', '_') - spec = orig.bdist_rpm._make_spec_file(self) - line23 = '%define version ' + version - line24 = '%define version ' + rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23, line24) - for line in spec - ] - insert_loc = spec.index(line24) + 1 - unmangled_version = "%define unmangled_version " + version - spec.insert(insert_loc, unmangled_version) - return spec diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py deleted file mode 100644 index 073de97..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,21 +0,0 @@ -import distutils.command.bdist_wininst as orig - - -class bdist_wininst(orig.bdist_wininst): - def reinitialize_command(self, command, reinit_subcommands=0): - """ - Supplement reinitialize_command to work around - http://bugs.python.org/issue20819 - """ - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None - return cmd - - def run(self): - self._is_running = True - try: - orig.bdist_wininst.run(self) - finally: - self._is_running = False diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py deleted file 
mode 100644 index e4b2c59..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py +++ /dev/null @@ -1,305 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -from distutils.file_util import copy_file -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler -from distutils.errors import DistutilsError -from distutils import log -import os -import sys -import itertools - -from setuptools.extension import Library - -try: - # Attempt to use Pyrex for building extensions, if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -try: - # Python 2.7 or >=3.2 - from sysconfig import _CONFIG_VARS -except ImportError: - from distutils.sysconfig import get_config_var - - get_config_var("LDSHARED") # make sure _config_vars is initialized - del get_config_var - from distutils.sysconfig import _config_vars as _CONFIG_VARS - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - import dl - use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') - except ImportError: - pass - - -if_dl = lambda s: s if have_rtld else '' - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir, - os.path.basename(filename)) - src_filename = os.path.join(self.build_lib, filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. 
- copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - if _build_ext is not _du_build_ext and not hasattr(_build_ext, - 'pyrex_sources'): - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self, fullname) - if fullname in self.ext_map: - ext = self.ext_map[fullname] - if isinstance(ext, Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn, libtype) - elif use_stubs and ext._links_to_dynamic: - d, fn = os.path.split(filename) - return os.path.join(d, 'dl-' + fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext, Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? - self.ext_map[fullname.split('.')[-1]] = ext - - ltd = self.shlibs and self.links_to_dynamic(ext) or False - ns = ltd and use_stubs and not isinstance(ext, Library) - ext._links_to_dynamic = ltd - ext._needs_stub = ns - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib, filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... 
- _CONFIG_VARS['LDSHARED'] = ( - "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name, value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - def get_export_symbols(self, ext): - if isinstance(ext, Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self, ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext, Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self, ext) - if ext._needs_stub: - cmd = self.get_finalized_command('build_py').build_lib - self.write_stub(cmd, ext) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) - return any(pkg + libname in libnames for libname in ext.libraries) - - def get_outputs(self): - return _build_ext.get_outputs(self) + self.__get_stubs_outputs() - - def __get_stubs_outputs(self): - # assemble the base name for each extension that needs a stub - ns_ext_bases = ( - os.path.join(self.build_lib, *ext._full_name.split('.')) - for ext in self.extensions - if ext._needs_stub - ) - # pair each base with the extension - pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) - return list(base + fnext for base, fnext in pairs) - - def __get_output_extensions(self): - yield '.py' - yield '.pyc' - if self.get_finalized_command('build_py').optimize: - yield '.pyo' - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s", ext._full_name, - output_dir) - stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + - '.py') - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file + " already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file, 'w') - f.write( - '\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp" + if_dl(", dl"), - " __file__ = pkg_resources.resource_filename" - "(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ]) - ) - f.close() - if compile: - from distutils.util import byte_compile - - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name == 'nt': - # Build shared libraries - # - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object( - self, objects, output_libname, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, export_symbols=None, - debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, - target_lang=None): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... - # libraries=None, library_dirs=None, runtime_library_dirs=None, - # export_symbols=None, extra_preargs=None, extra_postargs=None, - # build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir, filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py deleted file mode 100644 index a873d54..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py +++ /dev/null @@ -1,215 +0,0 @@ -from glob import glob -from distutils.util import convert_path -import distutils.command.build_py as orig -import os -import sys -import fnmatch -import textwrap - -try: - from setuptools.lib2to3_ex import Mixin2to3 -except ImportError: - class Mixin2to3: - def run_2to3(self, files, doctests=True): - "do nothing" - - -class build_py(orig.build_py, Mixin2to3): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. 
- See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. - """ - - def finalize_options(self): - orig.build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = (self.distribution.exclude_package_data or - {}) - if 'data_files' in self.__dict__: - del self.__dict__['data_files'] - self.__updated_files = [] - self.__doctests_2to3 = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - self.run_2to3(self.__updated_files, False) - self.run_2to3(self.__updated_files, True) - self.run_2to3(self.__doctests_2to3, True) - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self, attr): - if attr == 'data_files': # lazily compute data files - self.data_files = files = self._get_data_files() - return files - return orig.build_py.__getattr__(self, attr) - - def build_module(self, module, module_file, package): - outfile, copied = orig.build_py.build_module(self, module, module_file, - package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir) + 1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - if (copied and - srcfile in self.distribution.convert_2to3_doctests): - self.__doctests_2to3.append(outf) - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d, f = 
os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d != prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f == oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d], []).append(path) - - def get_data_files(self): - pass # Lazily compute data files in _get_data_files() function. - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = orig.build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg == package or pkg.startswith(package + '.'): - break - else: - return init_py - - f = open(init_py, 'rbU') - if 'declare_namespace'.encode() not in f.read(): - from distutils.errors import DistutilsError - - raise DistutilsError( - "Namespace package problem: %s is a namespace package, but " - "its\n__init__.py does not call declare_namespace()! Please " - 'fix it.\n(See the setuptools manual under ' - '"Namespace Packages" for details.)\n"' % (package,) - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked = {} - orig.build_py.initialize_options(self) - - def get_package_dir(self, package): - res = orig.build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - seen = {} - return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f, 1) # ditch dupes - ] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - - msg = textwrap.dedent(""" - Error: setup script specifies an absolute path: - - %s - - setup() arguments must *always* be /-separated paths relative to the - setup.py directory, *never* absolute paths. 
- """).lstrip() % path - raise DistutilsSetupError(msg) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py deleted file mode 100644 index 368b64f..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py +++ /dev/null @@ -1,169 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsError, DistutilsOptionError -import os -import glob - -from pkg_resources import Distribution, PathMetadata, normalize_path -from setuptools.command.easy_install import easy_install -from setuptools.compat import PY3 -import setuptools - - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' # always copy eggs installed in curdir - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - template = "Please rename %r to %r before using 'develop'" - args = ei.egg_info, ei.broken_egg_info - raise DistutilsError(template % args) - self.args = [ei.egg_name] - - easy_install.finalize_options(self) - self.expand_basedirs() - self.expand_dirs() - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - self.egg_link = os.path.join(self.install_dir, ei.egg_name + - '.egg-link') - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = normalize_path(self.egg_base) - egg_path = normalize_path(os.path.join(self.install_dir, - self.egg_path)) - if egg_path != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to " + target - ) - - # Make a distribution for the package's source - self.dist = Distribution( - target, - PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name=ei.egg_name - ) - - p = self.egg_base.replace(os.sep, '/') - if p != os.curdir: - p = '../' * (p.count('/') + 1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) - - def install_for_development(self): - if PY3 and getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', 
inplace=0) - self.run_command('build_ext') - - # Fixup egg-link and easy-install.pth - ei_cmd = self.get_finalized_command("egg_info") - self.egg_path = build_path - self.dist.location = build_path - # XXX - self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - self.install_site_py() # ensure that target dir is site-safe - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link, "w") - f.write(self.egg_path + "\n" + self.setup_path) - f.close() - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - egg_link_file = open(self.egg_link) - contents = [line.rstrip() for line in egg_link_file] - egg_link_file.close() - if contents not in ([self.egg_path], - [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! - log.warn("Note: you must uninstall or replace scripts manually!") - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self, dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path, 'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py deleted file mode 100644 index daf9fe5..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py +++ /dev/null @@ -1,2262 +0,0 @@ -#!/usr/bin/env python - -""" -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
- -__ https://pythonhosted.org/setuptools/easy_install.html - -""" - -from glob import glob -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS -from distutils import log, dir_util -from distutils.command.build_scripts import first_line_re -import sys -import os -import zipimport -import shutil -import tempfile -import zipfile -import re -import stat -import random -import platform -import textwrap -import warnings -import site -import struct -import contextlib -import subprocess -import shlex -import io - -from setuptools import Command -from setuptools.sandbox import run_setup -from setuptools.py31compat import get_path, get_config_vars -from setuptools.command import setopt -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from setuptools.compat import (iteritems, maxsize, basestring, unicode, - reraise, PY2, PY3, IS_WINDOWS) -from pkg_resources import ( - yield_lines, normalize_path, resource_string, ensure_directory, - get_distribution, find_distributions, Environment, Requirement, - Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, - VersionConflict, DEVELOP_DIST, -) -import pkg_resources - -# Turn on PEP440Warnings -warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) - - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - - -def is_64bit(): - return struct.calcsize("P") == 8 - - -def samefile(p1, p2): - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - - -if PY2: - def _to_ascii(s): - return s - - def isascii(s): - try: - unicode(s, 'ascii') - return True - except UnicodeError: - return False -else: - def _to_ascii(s): - return s.encode('ascii') - - def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, 
no matter what"), - ('site-dirs=', 'S', "list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', - "allow building eggs from local checkouts"), - ('version', None, "print version information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed") - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 'editable', - 'no-deps', 'local-snapshots-ok', 'version' - ] - - if site.ENABLE_USER_SITE: - help_msg = "install in user site-package '%s'" % site.USER_SITE - user_options.append(('user', None, help_msg)) - boolean_options.append('user') - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - # the --user option seemst to be an opt-in one, - # so the default should be False. - self.user = 0 - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. 
- self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - extant_blockers = ( - filename for filename in blockers - if os.path.exists(filename) or os.path.islink(filename) - ) - list(map(self._delete_path, extant_blockers)) - - def _delete_path(self, path): - log.info("Deleting %s", path) - if self.dry_run: - return - - is_tree = os.path.isdir(path) and not os.path.islink(path) - remover = rmtree if is_tree else os.unlink - remover(path) - - def finalize_options(self): - if self.version: - print('setuptools %s' % get_distribution('setuptools').version) - sys.exit() - - py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - - self.config_vars = { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - } - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - self._fix_install_dir_for_user_site() - - self.expand_basedirs() - self.expand_dirs() - - self._expand('install_dir', 'script_dir', 'build_directory', - 'site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options( - 'install_lib', ('install_dir', 'install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options( - 'install_scripts', ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? 
It's not used - # elsewhere - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in - self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d + " (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: - self.check_site_dir() - self.index_url = self.index_url or "https://pypi.python.org/simple" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path=self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path + sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path + sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize', 'optimize')) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): - raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - def _fix_install_dir_for_user_site(self): - """ - Fix the install_dir if "--user" was used. 
- """ - if not self.user or not site.ENABLE_USER_SITE: - return - - self.create_home_path() - if self.install_userbase is None: - msg = "User base directory is not specified" - raise DistutilsPlatformError(msg) - self.install_base = self.install_platbase = self.install_userbase - scheme_name = os.name.replace('posix', 'unix') + '_user' - self.select_scheme(scheme_name) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or IS_WINDOWS: - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data', ]) - - def run(self): - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except: - pid = random.randint(0, maxsize) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - pass - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir, 'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? 
Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname() + '.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: - os.unlink(testfile) - open(testfile, 'w').close() - os.unlink(testfile) - except (OSError, IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) - if instdir not in map(normalize_path, filter(None, PYTHONPATH)): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - elif self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - __cant_write_msg = textwrap.dedent(""" - can't create or remove files in install directory - - The following error occurred while trying to add or remove files in the - installation directory: - - %s - - The installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - """).lstrip() - - __not_exists_id = textwrap.dedent(""" - This directory does not currently exist. Please create it and try again, or - choose a different installation directory (using the -d or --install-dir - option). - """).lstrip() - - __access_msg = textwrap.dedent(""" - Perhaps your account does not have write access to this directory? If the - installation directory is a system-owned directory, you may need to sign in - as the administrator or "root" account. If you do not have administrative - access to this machine, you may wish to choose a different installation - directory, preferably one that is listed in your PYTHONPATH environment - variable. - - For information on other options, you may wish to consult the - documentation at: - - https://pythonhosted.org/setuptools/easy_install.html - - Please make the appropriate changes for your system and try again. - """).lstrip() - - def cant_write_to_target(self): - msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += '\n' + self.__not_exists_id - else: - msg += '\n' + self.__access_msg - raise DistutilsError(msg) - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. 
dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname() + ".pth" - ok_file = pth_file + '.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: - os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - f = open(pth_file, 'w') - except (OSError, IOError): - self.cant_write_to_target() - else: - try: - f.write("import os; f = open(%r, 'w'); f.write('OK'); " - "f.close()\n" % (ok_file,)) - f.close() - f = None - executable = sys.executable - if os.name == 'nt': - dirname, basename = os.path.split(executable) - alt = os.path.join(dirname, 'pythonw.exe') - if (basename.lower() == 'python.exe' and - os.path.exists(alt)): - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - - spawn([executable, '-E', '-c', 'pass'], 0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: - f.close() - if os.path.exists(ok_file): - os.unlink(ok_file) - if os.path.exists(pth_file): - os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - if dist.metadata_isdir('scripts/' + script_name): - # The "script" is a directory, likely a Python 3 - # __pycache__ directory, so skip it. - continue - self.install_script( - dist, script_name, - dist.get_metadata('scripts/' + script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base, filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self, spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - if not self.editable: - self.install_site_py() - - try: - if not isinstance(spec, Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, - True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, - not self.always_copy, self.local_index - ) - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg += " (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence == DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. - for dist in self.local_index[spec.project_name]: - if dist.location == download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.egg_distribution(download)] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
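# Illustrative sketch, not part of the original easy_install.py: the
# easy_install() method above branches on whether a spec is a URL, an
# existing local path, or a requirement string.  The sample arguments below
# are hypothetical.
import os
from pkg_resources import Requirement
from setuptools.package_index import URL_SCHEME

def classify_spec(spec):
    if URL_SCHEME(spec):
        return 'URL: download to a tmpdir, then install'
    if os.path.exists(spec):
        return 'existing file/directory: install directly'
    return Requirement.parse(spec)      # e.g. a 'somepkg>=1.0' style spec

print(classify_spec('https://example.com/somepkg-1.0.tar.gz'))
print(classify_spec('somepkg>=1.0'))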
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - if dist in self.local_index[dist.key]: - self.local_index.remove(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if (dist.has_metadata('dependency_links.txt') and - not self.no_find_links): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound as e: - raise DistutilsError( - "Could not find required distribution %s" % e.args - ) - except VersionConflict as e: - raise DistutilsError(e.report()) - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - msg = ("%r already exists in %s; build directory %s will not be " - "kept") - log.warn(msg, spec.key, self.build_directory, setup_base) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename) == setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents) == 1: - dist_filename = os.path.join(setup_base, contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst) - shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in ScriptWriter.best().get_args(dist): - self.write_script(*args) - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = is_python_script(script_text, script_name) - - if is_script: - script_text = (ScriptWriter.get_header(script_text) + - self._load_template(dev_path) % locals()) - self.write_script(script_name, _to_ascii(script_text), 'b') - - @staticmethod - def _load_template(dev_path): - """ - There are a couple of template scripts in the package. 
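# Illustrative sketch, not part of the original easy_install.py: the
# zip-safety decision made by should_unzip() above, restated standalone.
# 'zip_ok' mirrors the --zip-ok/--always-unzip flags and has_marker() stands
# in for dist.has_metadata(); both names are assumptions for the example.
def wants_unzipping(zip_ok, has_marker):
    if zip_ok is not None:              # an explicit -z / -Z flag wins
        return not zip_ok
    if has_marker('not-zip-safe'):      # egg declares itself unsafe when zipped
        return True
    return not has_marker('zip-safe')   # unknown safety: unpack to be safe

# An egg carrying neither marker gets unpacked by default:
print(wants_unzipping(None, lambda name: False))    # -> True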
This - function loads one of them and prepares it for use. - """ - # See https://bitbucket.org/pypa/setuptools/issue/134 for info - # on script file naming and downstream issues with SVR4 - name = 'script.tmpl' - if dev_path: - name = name.replace('.tmpl', ' (dev).tmpl') - - raw_bytes = resource_string('setuptools', name) - return raw_bytes.decode('utf-8') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir, x) for x in blockers] - ) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - if os.path.exists(target): - os.unlink(target) - f = open(target, "w" + mode) - f.write(contents) - f.close() - chmod(target, 0o777 - mask) - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % - os.path.abspath(dist_filename) - ) - if len(setups) > 1: - raise DistutilsError( - "Multiple setup scripts in %s" % - os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path, os.path.join(egg_path, - 'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path, metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir, - os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink, (destination,), "Removing " + - destination) - try: - new_dist_is_zipped = False - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f, m = self.unpack_and_compile, "Extracting" - else: - new_dist_is_zipped = True - if 
egg_path.startswith(tmpdir): - f, m = shutil.move, "Moving" - else: - f, m = shutil.copy2, "Copying" - self.execute(f, (egg_path, destination), - (m + " %s to %s") % - (os.path.basename(egg_path), - os.path.dirname(destination))) - update_dist_caches(destination, - fix_zipimporter_caches=new_dist_is_zipped) - except: - update_dist_caches(destination, fix_zipimporter_caches=False) - raise - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution( - None, - project_name=cfg.get('metadata', 'name'), - version=cfg.get('metadata', 'version'), platform=get_platform(), - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() + - '.egg') - egg_tmp = egg_path + '.tmp' - _egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf, 'w') - f.write('Metadata-Version: 1.0\n') - for k, v in cfg.items('metadata'): - if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) - f.close() - script_dir = os.path.join(_egg_info, 'scripts') - # delete entry-point scripts to avoid duping - self.delete_blockers( - [os.path.join(script_dir, args[0]) for args in - ScriptWriter.get_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - - def process(src, dst): - s = src.lower() - for old, new in prefixes: - if s.startswith(old): - src = new + src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old != 'SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile) - stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag( - os.path.join(egg_tmp, 'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level', 'native_libs': - if locals()[name]: 
- txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') - if not os.path.exists(txt): - f = open(txt, 'w') - f.write('\n'.join(locals()[name]) + '\n') - f.close() - - __mv_warning = textwrap.dedent(""" - Because this distribution was installed --multi-version, before you can - import modules from this package in an application, you will need to - 'import pkg_resources' and then use a 'require()' call similar to one of - these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher - """).lstrip() - - __id_warning = textwrap.dedent(""" - Note also that the installation directory must be on sys.path at runtime for - this to work. (e.g. by being the application's script directory, by being on - PYTHONPATH, or by being added to sys.path by your code.) - """) - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += '\n' + self.__mv_warning - if self.install_dir not in map(normalize_path, sys.path): - msg += '\n' + self.__id_warning - - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - __editable_msg = textwrap.dedent(""" - Extracted editable version of %(spec)s to %(dirname)s - - If it uses setuptools in its setup script, you can activate it in - "development" mode by going to that directory and running:: - - %(python)s setup.py develop - - See the setuptools documentation for the "develop" command for more info. - """).lstrip() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return '\n' + self.__editable_msg % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose > 2: - v = 'v' * (self.verbose - 1) - args.insert(0, '-' + v) - elif self.verbose < 2: - args.insert(0, '-q') - if self.dry_run: - args.insert(0, '-n') - log.info( - "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit as v: - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - self._set_fetcher_options(os.path.dirname(setup_script)) - args.append(dist_dir) - - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def _set_fetcher_options(self, base): - """ - When easy_install is about to run bdist_egg on a source dist, that - source dist might have 'setup_requires' directives, requiring - additional fetching. 
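# Illustrative sketch, not part of the original easy_install.py: the
# _set_fetcher_options() method continued below persists fetch settings into
# a setup.cfg "easy_install" section via setopt.edit_config(), so a nested
# setup_requires build sees the same index and find-links.  The temp
# directory and option values here are hypothetical.
import os
import tempfile
from setuptools.command import setopt

base = tempfile.mkdtemp()
setopt.edit_config(
    os.path.join(base, 'setup.cfg'),
    dict(easy_install={
        'index-url': 'https://pypi.python.org/simple',
        'find-links': 'https://example.com/local-packages/',
    }),
)
print(open(os.path.join(base, 'setup.cfg')).read())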
Ensure the fetcher options given to easy_install - are available to that command as well. - """ - # find the fetch options from easy_install and write them out - # to the setup.cfg file. - ei_opts = self.distribution.get_option_dict('easy_install').copy() - fetch_directives = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts', - ) - fetch_options = {} - for key, val in ei_opts.items(): - if key not in fetch_directives: - continue - fetch_options[key.replace('_', '-')] = val[1] - # create a settings dictionary suitable for `edit_config` - settings = dict(easy_install=fetch_options) - cfg_filename = os.path.join(base, 'setup.cfg') - setopt.edit_config(cfg_filename, settings) - - def update_pth(self, dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key == 'setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? - filename = os.path.join(self.install_dir, 'setuptools.pth') - if os.path.islink(filename): - os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location) + '\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - to_chmod = [] - - def pf(src, dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src, dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if sys.dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - __no_default_msg = textwrap.dedent(""" - bad install directory or PYTHONPATH - - You are attempting to install a package to a directory that is not - on PYTHONPATH and which Python does not read ".pth" files from. 
The - installation directory you specified (via --install-dir, --prefix, or - the distutils default setting) was: - - %s - - and your PYTHONPATH environment variable currently contains: - - %r - - Here are some of your options for correcting the problem: - - * You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - - * You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - - * You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations - - Please make the appropriate changes for your system and try again.""").lstrip() - - def no_default_version_msg(self): - template = self.__no_default_msg - return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy, 'rb') - current = f.read() - # we want str, not bytes - if PY3: - current = current.decode() - - f.close() - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." % sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy, 'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in iteritems(self.config_vars): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0o700)" % path) - os.makedirs(path, 0o700) - - INSTALL_SCHEMES = dict( - posix=dict( - install_dir='$base/lib/python$py_version_short/site-packages', - script_dir='$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir='$base/Lib/site-packages', - script_dir='$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) - for attr, val in scheme.items(): - if getattr(self, attr, None) is None: - setattr(self, attr, val) - - from distutils.util import subst_vars - - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = [_f for _f in os.environ.get('PYTHONPATH', - '').split(os.pathsep) if _f] - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - 
sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - lib_paths = get_path('purelib'), get_path('platlib') - for site_lib in lib_paths: - if site_lib not in sitedirs: - sitedirs.append(site_lib) - - if site.ENABLE_USER_SITE: - sitedirs.append(site.USER_SITE) - - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth', 'setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname, name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a ConfigParser.RawConfigParser, or None - """ - f = open(dist_filename, 'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] - if prepended < 12: # no wininst data here - return None - f.seek(prepended - 12) - - from setuptools.compat import StringIO, ConfigParser - import struct - - tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) - if tag not in (0x1234567A, 0x1234567B): - return None # not a valid tag - - f.seek(prepended - (12 + cfglen)) - cfg = ConfigParser.RawConfigParser( - {'version': '', 'target_version': ''}) - try: - part = f.read(cfglen) - # Read up to the first null byte. - config = part.split(b'\0', 1)[0] - # Now the config is in bytes, but for RawConfigParser, it should - # be text, so decode it. 
- config = config.decode(sys.getfilesystemencoding()) - cfg.readfp(StringIO(config)) - except ConfigParser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - - finally: - f.close() - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/'), - ('DATA/lib/site-packages', ''), - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts) == 3 and parts[2] == 'PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts) != 2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB', 'PLATLIB'): - contents = z.read(name) - if PY3: - contents = contents.decode() - for pth in yield_lines(contents): - pth = pth.strip().replace('\\', '/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0], pth)), '')) - finally: - z.close() - prefixes = [(x.lower(), y) for x, y in prefixes] - prefixes.sort() - prefixes.reverse() - return prefixes - - -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = False - - def __init__(self, filename, sitedirs=()): - self.filename = filename - self.sitedirs = list(map(normalize_path, sitedirs)) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load() - Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - list(map(self.add, find_distributions(path, True))) - - def _load(self): - self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename, 'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir, path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative, self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename, 'wt') - f.write(data) - f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = 
False - - def add(self, dist): - """Add `dist` to the distribution map""" - new_path = ( - dist.location not in self.paths and ( - dist.location not in self.sitedirs or - # account for '.' being in PYTHONPATH - dist.location == os.getcwd() - ) - ) - if new_path: - self.paths.append(dist.location) - self.dirty = True - Environment.add(self, dist) - - def remove(self, dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location) - self.dirty = True - Environment.remove(self, dist) - - def make_relative(self, path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep == '/' and '/' or os.sep - while len(npath) >= baselen: - if npath == self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - - -def _first_line_re(): - """ - Return a regular expression based on first_line_re suitable for matching - strings. - """ - if isinstance(first_line_re.pattern, str): - return first_line_re - - # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. - return re.compile(first_line_re.pattern.decode()) - - -def auto_chmod(func, arg, exc): - if func is os.remove and IS_WINDOWS: - chmod(arg, stat.S_IWRITE) - return func(arg) - et, ev, _ = sys.exc_info() - reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) - - -def update_dist_caches(dist_path, fix_zipimporter_caches): - """ - Fix any globally cached `dist_path` related data - - `dist_path` should be a path of a newly installed egg distribution (zipped - or unzipped). - - sys.path_importer_cache contains finder objects that have been cached when - importing data from the original distribution. Any such finders need to be - cleared since the replacement distribution might be packaged differently, - e.g. a zipped egg distribution might get replaced with an unzipped egg - folder or vice versa. Having the old finders cached may then cause Python - to attempt loading modules from the replacement distribution using an - incorrect loader. - - zipimport.zipimporter objects are Python loaders charged with importing - data packaged inside zip archives. If stale loaders referencing the - original distribution, are left behind, they can fail to load modules from - the replacement distribution. E.g. if an old zipimport.zipimporter instance - is used to load data from a new zipped egg archive, it may cause the - operation to attempt to locate the requested data in the wrong location - - one indicated by the original distribution's zip archive directory - information. Such an operation may then fail outright, e.g. report having - read a 'bad local file header', or even worse, it may fail silently & - return invalid data. - - zipimport._zip_directory_cache contains cached zip archive directory - information for all existing zipimport.zipimporter instances and all such - instances connected to the same archive share the same cached directory - information. - - If asked, and the underlying Python implementation allows it, we can fix - all existing zipimport.zipimporter instances instead of having to track - them down and remove them one by one, by updating their shared cached zip - archive directory information. This, of course, assumes that the - replacement distribution is packaged as a zipped egg. 
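# Illustrative sketch, not part of the original easy_install.py: the two
# process-wide caches that update_dist_caches() (continued below) has to
# reconcile once an egg on disk has been replaced.  'dist_path' is a
# hypothetical location.
import sys
import zipimport

dist_path = '/tmp/example/Foo-1.0-py2.7.egg'
# Per-path finders; a stale entry keeps serving modules from the old archive:
print(sys.path_importer_cache.get(dist_path))
# Zip directory data shared by every zipimporter opened on that archive:
print(zipimport._zip_directory_cache.get(dist_path))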
- - If not asked to fix existing zipimport.zipimporter instances, we still do - our best to clear any remaining zipimport.zipimporter related cached data - that might somehow later get used when attempting to load data from the new - distribution and thus cause such load operations to fail. Note that when - tracking down such remaining stale data, we can not catch every conceivable - usage from here, and we clear only those that we know of and have found to - cause problems if left alive. Any remaining caches should be updated by - whomever is in charge of maintaining them, i.e. they should be ready to - handle us replacing their zip archives with new distributions at runtime. - - """ - # There are several other known sources of stale zipimport.zipimporter - # instances that we do not clear here, but might if ever given a reason to - # do so: - # * Global setuptools pkg_resources.working_set (a.k.a. 'master working - # set') may contain distributions which may in turn contain their - # zipimport.zipimporter loaders. - # * Several zipimport.zipimporter loaders held by local variables further - # up the function call stack when running the setuptools installation. - # * Already loaded modules may have their __loader__ attribute set to the - # exact loader instance used when importing them. Python 3.4 docs state - # that this information is intended mostly for introspection and so is - # not expected to cause us problems. - normalized_path = normalize_path(dist_path) - _uncache(normalized_path, sys.path_importer_cache) - if fix_zipimporter_caches: - _replace_zip_directory_cache_data(normalized_path) - else: - # Here, even though we do not want to fix existing and now stale - # zipimporter cache information, we still want to remove it. Related to - # Python's zip archive directory information cache, we clear each of - # its stale entries in two phases: - # 1. Clear the entry so attempting to access zip archive information - # via any existing stale zipimport.zipimporter instances fails. - # 2. Remove the entry from the cache so any newly constructed - # zipimport.zipimporter instances do not end up using old stale - # zip archive directory information. - # This whole stale data removal step does not seem strictly necessary, - # but has been left in because it was done before we started replacing - # the zip archive directory information cache content if possible, and - # there are no relevant unit tests that we can depend on to tell us if - # this is really needed. - _remove_and_clear_zip_directory_cache_data(normalized_path) - - -def _collect_zipimporter_cache_entries(normalized_path, cache): - """ - Return zipimporter cache entry keys related to a given normalized path. - - Alternative path spellings (e.g. those using different character case or - those using alternative path separators) related to the same path are - included. Any sub-path entries are included as well, i.e. those - corresponding to zip archives embedded in other zip archives. - - """ - result = [] - prefix_len = len(normalized_path) - for p in cache: - np = normalize_path(p) - if (np.startswith(normalized_path) and - np[prefix_len:prefix_len + 1] in (os.sep, '')): - result.append(p) - return result - - -def _update_zipimporter_cache(normalized_path, cache, updater=None): - """ - Update zipimporter cache data for a given normalized path. - - Any sub-path entries are processed as well, i.e. those corresponding to zip - archives embedded in other zip archives. 
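# Illustrative sketch, not part of the original easy_install.py: the prefix
# matching done by _collect_zipimporter_cache_entries() above.  Entries for
# the archive itself and for archives nested inside it are selected; names
# that merely share a string prefix are not.  The cache contents are
# hypothetical.
import os
from pkg_resources import normalize_path

cache = {
    '/tmp/Foo-1.0.egg': 'zip directory data',
    '/tmp/Foo-1.0.egg/nested.zip': 'nested archive data',
    '/tmp/Foo-1.0.eggs': 'unrelated entry',
}
target = normalize_path('/tmp/Foo-1.0.egg')
hits = [p for p in cache
        if normalize_path(p).startswith(target)
        and normalize_path(p)[len(target):len(target) + 1] in (os.sep, '')]
print(sorted(hits))     # the '.eggs' entry is left alone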
- - Given updater is a callable taking a cache entry key and the original entry - (after already removing the entry from the cache), and expected to update - the entry and possibly return a new one to be inserted in its place. - Returning None indicates that the entry should not be replaced with a new - one. If no updater is given, the cache entries are simply removed without - any additional processing, the same as if the updater simply returned None. - - """ - for p in _collect_zipimporter_cache_entries(normalized_path, cache): - # N.B. pypy's custom zipimport._zip_directory_cache implementation does - # not support the complete dict interface: - # * Does not support item assignment, thus not allowing this function - # to be used only for removing existing cache entries. - # * Does not support the dict.pop() method, forcing us to use the - # get/del patterns instead. For more detailed information see the - # following links: - # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 - # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 - old_entry = cache[p] - del cache[p] - new_entry = updater and updater(p, old_entry) - if new_entry is not None: - cache[p] = new_entry - - -def _uncache(normalized_path, cache): - _update_zipimporter_cache(normalized_path, cache) - - -def _remove_and_clear_zip_directory_cache_data(normalized_path): - def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): - old_entry.clear() - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=clear_and_remove_cached_zip_archive_directory_data) - -# PyPy Python implementation does not allow directly writing to the -# zipimport._zip_directory_cache and so prevents us from attempting to correct -# its content. The best we can do there is clear the problematic cache content -# and have PyPy repopulate it as needed. The downside is that if there are any -# stale zipimport.zipimporter instances laying around, attempting to use them -# will fail due to not having its zip archive directory information available -# instead of being automatically corrected to use the new correct zip archive -# directory information. -if '__pypy__' in sys.builtin_module_names: - _replace_zip_directory_cache_data = \ - _remove_and_clear_zip_directory_cache_data -else: - def _replace_zip_directory_cache_data(normalized_path): - def replace_cached_zip_archive_directory_data(path, old_entry): - # N.B. In theory, we could load the zip directory information just - # once for all updated path spellings, and then copy it locally and - # update its contained path strings to contain the correct - # spelling, but that seems like a way too invasive move (this cache - # structure is not officially documented anywhere and could in - # theory change with new Python releases) for no significant - # benefit. - old_entry.clear() - zipimport.zipimporter(path) - old_entry.update(zipimport._zip_directory_cache[path]) - return old_entry - - _update_zipimporter_cache( - normalized_path, zipimport._zip_directory_cache, - updater=replace_cached_zip_archive_directory_data) - - -def is_python(text, filename='<string>'): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! 
line)""" - try: - with io.open(executable, encoding='latin-1') as fp: - magic = fp.read(2) - except (OSError, IOError): - return executable - return magic == '#!' - - -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - return subprocess.list2cmdline([arg]) - - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): - pass - - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error as e: - log.debug("chmod failed: %s", e) - - -def fix_jython_executable(executable, options): - warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2) - - if not JythonCommandSpec.relevant(): - return executable - - cmd = CommandSpec.best().from_param(executable) - cmd.install_options(options) - return cmd.as_header().lstrip('#!').rstrip('\n') - - -class CommandSpec(list): - """ - A command spec for a #! header, specified as a list of arguments akin to - those passed to Popen. - """ - - options = [] - split_args = dict() - - @classmethod - def best(cls): - """ - Choose the best CommandSpec class based on environmental conditions. - """ - return cls if not JythonCommandSpec.relevant() else JythonCommandSpec - - @classmethod - def _sys_executable(cls): - _default = os.path.normpath(sys.executable) - return os.environ.get('__PYVENV_LAUNCHER__', _default) - - @classmethod - def from_param(cls, param): - """ - Construct a CommandSpec from a parameter to build_scripts, which may - be None. - """ - if isinstance(param, cls): - return param - if isinstance(param, list): - return cls(param) - if param is None: - return cls.from_environment() - # otherwise, assume it's a string. - return cls.from_string(param) - - @classmethod - def from_environment(cls): - return cls([cls._sys_executable()]) - - @classmethod - def from_string(cls, string): - """ - Construct a command spec from a simple string representing a command - line parseable by shlex.split. - """ - items = shlex.split(string, **cls.split_args) - return cls(items) - - def install_options(self, script_text): - self.options = shlex.split(self._extract_options(script_text)) - cmdline = subprocess.list2cmdline(self) - if not isascii(cmdline): - self.options[:0] = ['-x'] - - @staticmethod - def _extract_options(orig_script): - """ - Extract any options from the first line of the script. - """ - first = (orig_script + '\n').splitlines()[0] - match = _first_line_re().match(first) - options = match.group(1) or '' if match else '' - return options.strip() - - def as_header(self): - return self._render(self + list(self.options)) - - @staticmethod - def _render(items): - cmdline = subprocess.list2cmdline(items) - return '#!' + cmdline + '\n' - -# For pbr compat; will be removed in a future version. 
-sys_executable = CommandSpec._sys_executable() - - -class WindowsCommandSpec(CommandSpec): - split_args = dict(posix=False) - - -class JythonCommandSpec(CommandSpec): - @classmethod - def relevant(cls): - return ( - sys.platform.startswith('java') - and - __import__('java').lang.System.getProperty('os.name') != 'Linux' - ) - - @classmethod - def from_string(cls, string): - return cls([string]) - - def as_header(self): - """ - Workaround Jython's sys.executable being a .sh (an invalid - shebang line interpreter) - """ - if not is_sh(self[0]): - return super(JythonCommandSpec, self).as_header() - - if self.options: - # Can't apply the workaround, leave it broken - log.warn( - "WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - return super(JythonCommandSpec, self).as_header() - - items = ['/usr/bin/env'] + self + list(self.options) - return self._render(items) - - -class ScriptWriter(object): - """ - Encapsulates behavior around writing entry point scripts for console and - gui apps. - """ - - template = textwrap.dedent(""" - # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r - __requires__ = %(spec)r - import sys - from pkg_resources import load_entry_point - - if __name__ == '__main__': - sys.exit( - load_entry_point(%(spec)r, %(group)r, %(name)r)() - ) - """).lstrip() - - command_spec_class = CommandSpec - - @classmethod - def get_script_args(cls, dist, executable=None, wininst=False): - # for backward compatibility - warnings.warn("Use get_args", DeprecationWarning) - writer = (WindowsScriptWriter if wininst else ScriptWriter).best() - header = cls.get_script_header("", executable, wininst) - return writer.get_args(dist, header) - - @classmethod - def get_script_header(cls, script_text, executable=None, wininst=False): - # for backward compatibility - warnings.warn("Use get_header", DeprecationWarning) - if wininst: - executable = "python.exe" - cmd = cls.command_spec_class.best().from_param(executable) - cmd.install_options(script_text) - return cmd.as_header() - - @classmethod - def get_args(cls, dist, header=None): - """ - Yield write_script() argument tuples for a distribution's entrypoints - """ - if header is None: - header = cls.get_header() - spec = str(dist.as_requirement()) - for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name, ep in dist.get_entry_map(group).items(): - script_text = cls.template % locals() - for res in cls._get_script_args(type_, name, header, - script_text): - yield res - - @classmethod - def get_writer(cls, force_windows): - # for backward compatibility - warnings.warn("Use best", DeprecationWarning) - return WindowsScriptWriter.best() if force_windows else cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter for this environment. - """ - return WindowsScriptWriter.best() if IS_WINDOWS else cls - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - # Simply write the stub with no extension. - yield (name, header + script_text) - - @classmethod - def get_header(cls, script_text="", executable=None): - """Create a #! 
line, getting options (if any) from script_text""" - cmd = cls.command_spec_class.best().from_param(executable) - cmd.install_options(script_text) - return cmd.as_header() - - -class WindowsScriptWriter(ScriptWriter): - command_spec_class = WindowsCommandSpec - - @classmethod - def get_writer(cls): - # for backward compatibility - warnings.warn("Use best", DeprecationWarning) - return cls.best() - - @classmethod - def best(cls): - """ - Select the best ScriptWriter suitable for Windows - """ - writer_lookup = dict( - executable=WindowsExecutableLauncherWriter, - natural=cls, - ) - # for compatibility, use the executable launcher by default - launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') - return writer_lookup[launcher] - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - "For Windows, add a .py extension" - ext = dict(console='.pya', gui='.pyw')[type_] - if ext not in os.environ['PATHEXT'].lower().split(';'): - warnings.warn("%s not listed in PATHEXT; scripts will not be " - "recognized as executables." % ext, UserWarning) - old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] - old.remove(ext) - header = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield name + ext, header + script_text, 't', blockers - - @staticmethod - def _adjust_header(type_, orig_header): - """ - Make sure 'pythonw' is used for gui and and 'python' is used for - console (regardless of what sys.executable is). - """ - pattern = 'pythonw.exe' - repl = 'python.exe' - if type_ == 'gui': - pattern, repl = repl, pattern - pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) - new_header = pattern_ob.sub(string=orig_header, repl=repl) - clean_header = new_header[2:-1].strip('"') - if sys.platform == 'win32' and not os.path.exists(clean_header): - # the adjusted version doesn't exist, so return the original - return orig_header - return new_header - - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - """ - For Windows, add a .py extension and an .exe launcher - """ - if type_ == 'gui': - launcher_type = 'gui' - ext = '-script.pyw' - old = ['.pyw'] - else: - launcher_type = 'cli' - ext = '-script.py' - old = ['.py', '.pyc', '.pyo'] - hdr = cls._adjust_header(type_, header) - blockers = [name + x for x in old] - yield (name + ext, hdr + script_text, 't', blockers) - yield ( - name + '.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode - ) - if not is_64bit(): - # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for - # launchers like easy_install.exe). Consider only - # adding a manifest for launchers detected as installers. - # See Distribute #143 for details. - m_name = name + '.exe.manifest' - yield (m_name, load_launcher_manifest(name), 't') - - -# for backward-compatibility -get_script_args = ScriptWriter.get_script_args -get_script_header = ScriptWriter.get_script_header - - -def get_win_launcher(type): - """ - Load the Windows launcher (executable) suitable for launching a script. - - `type` should be either 'cli' or 'gui' - - Returns the executable as a byte string. 
- """ - launcher_fn = '%s.exe' % type - if platform.machine().lower() == 'arm': - launcher_fn = launcher_fn.replace(".", "-arm.") - if is_64bit(): - launcher_fn = launcher_fn.replace(".", "-64.") - else: - launcher_fn = launcher_fn.replace(".", "-32.") - return resource_string('setuptools', launcher_fn) - - -def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if PY2: - return manifest % vars() - else: - return manifest.decode('utf-8') % vars() - - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - - -def current_umask(): - tmp = os.umask(0o022) - os.umask(tmp) - return tmp - - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools - - argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0 - sys.argv.append(argv0) - main() - - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self, *args, **kw): - with _patch_usage(): - Distribution._show_help(self, *args, **kw) - - if argv is None: - argv = sys.argv[1:] - - with _patch_usage(): - setup( - script_args=['-q', 'easy_install', '-v'] + argv, - script_name=sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - - -@contextlib.contextmanager -def _patch_usage(): - import distutils.core - USAGE = textwrap.dedent(""" - usage: %(script)s [options] requirement_or_url ... 
- or: %(script)s --help - """).lstrip() - - def gen_usage(script_name): - return USAGE % dict( - script=os.path.basename(script_name), - ) - - saved = distutils.core.gen_usage - distutils.core.gen_usage = gen_usage - try: - yield - finally: - distutils.core.gen_usage = saved diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py deleted file mode 100644 index 50f3d5c..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py +++ /dev/null @@ -1,480 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -from distutils.filelist import FileList as _FileList -from distutils.util import convert_path -from distutils import log -import distutils.errors -import distutils.filelist -import os -import re -import sys - -try: - from setuptools_svn import svn_utils -except ImportError: - pass - -from setuptools import Command -from setuptools.command.sdist import sdist -from setuptools.compat import basestring, PY3, StringIO -from setuptools.command.sdist import walk_revctrl -from pkg_resources import ( - parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) -import setuptools.unicode_utils as unicode_utils - -from pkg_resources import packaging - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} - - def initialize_options(self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - self.vtags = None - - def save_version_info(self, filename): - from setuptools.command.setopt import edit_config - - values = dict( - egg_info=dict( - tag_svn_revision=0, - tag_date=0, - tag_build=self.tags(), - ) - ) - edit_config(filename, values) - - def finalize_options(self): - self.egg_name = safe_name(self.distribution.get_name()) - self.vtags = self.tags() - self.egg_version = self.tagged_version() - - parsed_version = parse_version(self.egg_version) - - try: - is_version = isinstance(parsed_version, packaging.version.Version) - spec = ( - "%s==%s" if is_version else "%s===%s" - ) - list( - parse_requirements(spec % (self.egg_name, self.egg_version)) - ) - except ValueError: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name, self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('', os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name) + '.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: - self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) - # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key == self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - if PY3: - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def tagged_version(self): - version = self.distribution.get_version() - # egg_info may be called more than once for a distribution, - # in which case the version string already contains all tags. - if self.vtags and version.endswith(self.vtags): - return safe_version(version) - return safe_version(version + self.vtags) - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - ep.require(installer=installer) - writer = ep.resolve() - writer(self, ep.name, os.path.join(self.egg_info, ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def tags(self): - version = '' - if self.tag_build: - version += self.tag_build - if self.tag_svn_revision: - rev = self.get_svn_revision() - if rev: # is 0 if it's not an svn working copy - version += '-r%s' % rev - if self.tag_date: - import time - - version += time.strftime("-%Y%m%d") - return version - - @staticmethod - def get_svn_revision(): - if 'svn_utils' not in globals(): - return "0" - return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name + '.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-" * 78 + '\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n' + '-' * 78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - - -class FileList(_FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if self._safe_path(path): - self.files.append(path) - - def extend(self, paths): - self.files.extend(filter(self._safe_path, paths)) - - def _repair(self): - """ - Replace self.files with only safe paths - - Because some owners of FileList manipulate the underlying - ``files`` attribute directly, this method must be called to - repair those paths. 
- """ - self.files = list(filter(self._safe_path, self.files)) - - def _safe_path(self, path): - enc_warn = "'%s' not %s encodable -- skipping" - - # To avoid accidental trans-codings errors, first to unicode - u_path = unicode_utils.filesys_decode(path) - if u_path is None: - log.warn("'%s' in unexpected encoding -- skipping" % path) - return False - - # Must ensure utf-8 encodability - utf8_path = unicode_utils.try_encode(u_path, "utf-8") - if utf8_path is None: - log.warn(enc_warn, path, 'utf-8') - return False - - try: - # accept is either way checks out - if os.path.exists(u_path) or os.path.exists(utf8_path): - return True - # this will catch any encode errors decoding u_path - except UnicodeEncodeError: - log.warn(enc_warn, path, sys.getfilesystemencoding()) - - -class manifest_maker(sdist): - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def _manifest_normalize(self, path): - path = unicode_utils.filesys_decode(path) - return path.replace(os.sep, '/') - - def write_manifest(self): - """ - Write the file list in 'self.filelist' to the manifest file - named by 'self.manifest'. - """ - self.filelist._repair() - - # Now _repairs should encodability, but not unicode - files = [self._manifest_normalize(f) for f in self.filelist.files] - msg = "writing manifest file '%s'" % self.manifest - self.execute(write_file, (self.manifest, files), msg) - - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): - sdist.warn(self, msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self._add_egg_info(cmd=ei_cmd) - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def _add_egg_info(self, cmd): - """ - Add paths for egg-info files for an external egg-base. - - The egg-info files are written to egg-base. If egg-base is - outside the current working directory, this method - searchs the egg-base directory for files to include - in the manifest. Uses distutils.filelist.findall (which is - really the version monkeypatched in by setuptools/__init__.py) - to perform the search. - - Since findall records relative paths, prefix the returned - paths with cmd.egg_base, so add_default's include_pattern call - (which is looking for the absolute cmd.egg_info) will match - them. 
- """ - if cmd.egg_base == os.curdir: - # egg-info files were already added by something else - return - - discovered = distutils.filelist.findall(cmd.egg_base) - resolved = (os.path.join(cmd.egg_base, path) for path in discovered) - self.filelist.allfiles.extend(resolved) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, - is_regex=1) - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - contents = "\n".join(contents) - - # assuming the contents has been vetted for utf-8 encoding - contents = contents.encode("utf-8") - - with open(filename, "wb") as f: # always write POSIX-style manifest - f.write(contents) - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution, 'zip_safe', None) - from setuptools.command import bdist_egg - - bdist_egg.write_safety_flag(cmd.egg_info, safe) - - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def _write_requirements(stream, reqs): - lines = yield_lines(reqs or ()) - append_cr = lambda line: line + '\n' - lines = map(append_cr, lines) - stream.writelines(lines) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = StringIO() - _write_requirements(data, dist.install_requires) - extras_require = dist.extras_require or {} - for extra in sorted(extras_require): - data.write('\n[{extra}]\n'.format(**vars())) - _write_requirements(data, extras_require[extra]) - cmd.write_or_delete_file("requirements", filename, data.getvalue()) - - -def write_setup_requirements(cmd, basename, filename): - data = StringIO() - _write_requirements(data, cmd.distribution.setup_requires) - cmd.write_or_delete_file("setup-requirements", filename, data.getvalue()) - - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [ - k.split('.', 1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value) + '\n' - cmd.write_or_delete_file(argname, filename, value, force) - - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep, basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in sorted(ep.items()): - if not isinstance(contents, basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(sorted(map(str, contents.values()))) - data.append('[%s]\n%s\n\n' % (section, contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO', 'rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - f.close() - return 0 diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py deleted file mode 100644 index d2bca2e..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py +++ /dev/null @@ -1,125 +0,0 @@ -from distutils.errors import DistutilsArgError -import inspect -import glob -import warnings -import platform -import distutils.command.install as orig - -import setuptools - -# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for -# now. 
See https://bitbucket.org/pypa/setuptools/issue/199/ -_install = orig.install - - -class install(orig.install): - """Use easy_install to install the package, w/dependencies""" - - user_options = orig.install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = orig.install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - orig.install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - - def finalize_options(self): - orig.install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return orig.install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return orig.install.run(self) - - if not self._called_from_setup(inspect.currentframe()): - # Run in backward-compatibility mode to support bdist_* commands. - orig.install.run(self) - else: - self.do_egg_install() - - @staticmethod - def _called_from_setup(run_frame): - """ - Attempt to detect whether run() was called from setup() or by another - command. If called by setup(), the parent caller will be the - 'run_command' method in 'distutils.dist', and *its* caller will be - the 'run_commands' method. If called any other way, the - immediate caller *might* be 'run_command', but it won't have been - called by 'run_commands'. Return True in that case or if a call stack - is unavailable. Return False otherwise. - """ - if run_frame is None: - msg = "Call stack not available. bdist_* commands may fail." - warnings.warn(msg) - if platform.python_implementation() == 'IronPython': - msg = "For best results, pass -X:Frames to enable call stack." - warnings.warn(msg) - return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = ( - [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + - install.new_commands -) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py deleted file mode 100644 index fd0f118..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,116 +0,0 @@ -from distutils import log, dir_util -import os - -from setuptools import Command -from setuptools.archive_util import unpack_archive -import pkg_resources - - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib', - ('install_dir', 'install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name() + '.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink, (self.target,), "Removing " + self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute( - self.copytree, (), "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src, dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. 
- for skip in '.svn/', 'CVS/': - if src.startswith(skip) or '/' + skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - - unpack_archive(self.source, self.target, skimmer) - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: - return - filename, ext = os.path.splitext(self.target) - filename += '-nspkg.pth' - self.outputs.append(filename) - log.info("Installing %s", filename) - lines = map(self._gen_nspkg_line, nsp) - - if self.dry_run: - # always generate the lines, even in dry run - list(lines) - return - - with open(filename, 'wt') as f: - f.writelines(lines) - - _nspkg_tmpl = ( - "import sys, types, os", - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)", - "ie = os.path.exists(os.path.join(p,'__init__.py'))", - "m = not ie and " - "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", - "mp = (m or []) and m.__dict__.setdefault('__path__',[])", - "(p not in mp) and mp.append(p)", - ) - "lines for the namespace installer" - - _nspkg_tmpl_multi = ( - 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', - ) - "additional line(s) when a parent package is indicated" - - @classmethod - def _gen_nspkg_line(cls, pkg): - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - tmpl_lines = cls._nspkg_tmpl - parent, sep, child = pkg.rpartition('.') - if parent: - tmpl_lines += cls._nspkg_tmpl_multi - return ';'.join(tmpl_lines) % locals() + '\n' - - def _get_all_ns_packages(self): - """Return sorted list of all package namespaces""" - nsp = set() - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp.add('.'.join(pkg)) - pkg.pop() - return sorted(nsp) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py deleted file mode 100644 index 9b77222..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -import imp -from itertools import product, starmap -import distutils.command.install_lib as orig - -class install_lib(orig.install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - """ - Return a collections.Sized collections.Container of paths to be - excluded for single_version_externally_managed installations. - """ - all_packages = ( - pkg - for ns_pkg in self._get_SVEM_NSPs() - for pkg in self._all_packages(ns_pkg) - ) - - excl_specs = product(all_packages, self._gen_exclusion_paths()) - return set(starmap(self._exclude_pkg_path, excl_specs)) - - def _exclude_pkg_path(self, pkg, exclusion_path): - """ - Given a package name and exclusion path within that package, - compute the full exclusion path. 
- """ - parts = pkg.split('.') + [exclusion_path] - return os.path.join(self.install_dir, *parts) - - @staticmethod - def _all_packages(pkg_name): - """ - >>> list(install_lib._all_packages('foo.bar.baz')) - ['foo.bar.baz', 'foo.bar', 'foo'] - """ - while pkg_name: - yield pkg_name - pkg_name, sep, child = pkg_name.rpartition('.') - - def _get_SVEM_NSPs(self): - """ - Get namespace packages (list) but only for - single_version_externally_managed installations and empty otherwise. - """ - # TODO: is it necessary to short-circuit here? i.e. what's the cost - # if get_finalized_command is called even when namespace_packages is - # False? - if not self.distribution.namespace_packages: - return [] - - install_cmd = self.get_finalized_command('install') - svem = install_cmd.single_version_externally_managed - - return self.distribution.namespace_packages if svem else [] - - @staticmethod - def _gen_exclusion_paths(): - """ - Generate file paths to be excluded for namespace packages (bytecode - cache files). - """ - # always exclude the package module itself - yield '__init__.py' - - yield '__init__.pyc' - yield '__init__.pyo' - - if not hasattr(imp, 'get_tag'): - return - - base = os.path.join('__pycache__', '__init__.' + imp.get_tag()) - yield base + '.pyc' - yield base + '.pyo' - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return orig.install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)", - dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = orig.install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py deleted file mode 100644 index be66cb2..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py +++ /dev/null @@ -1,60 +0,0 @@ -from distutils import log -import distutils.command.install_scripts as orig -import os - -from pkg_resources import Distribution, PathMetadata, ensure_directory - - -class install_scripts(orig.install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - orig.install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - import setuptools.command.easy_install as ei - - self.run_command("egg_info") - if self.distribution.scripts: - orig.install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! 
- return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - exec_param = getattr(bs_cmd, 'executable', None) - bw_cmd = self.get_finalized_command("bdist_wininst") - is_wininst = getattr(bw_cmd, '_is_running', False) - writer = ei.ScriptWriter - if is_wininst: - exec_param = "python.exe" - writer = ei.WindowsScriptWriter - # resolve the writer to the environment - writer = writer.best() - cmd = writer.command_spec_class.best().from_param(exec_param) - for args in writer.get_args(dist, cmd.as_header()): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod, current_umask - - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - f = open(target, "w" + mode) - f.write(contents) - f.close() - chmod(target, 0o777 - mask) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml deleted file mode 100644 index 5972a96..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml +++ /dev/null @@ -1,15 +0,0 @@ -<?xml version="1.0" encoding="UTF-8" standalone="yes"?> -<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> - <assemblyIdentity version="1.0.0.0" - processorArchitecture="X86" - name="%(name)s" - type="win32"/> - <!-- Identify the application security requirements. 
--> - <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"/> - </requestedPrivileges> - </security> - </trustInfo> -</assembly> diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py deleted file mode 100644 index 8d6336a..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -import distutils.command.register as orig - - -class register(orig.register): - __doc__ = orig.register.__doc__ - - def run(self): - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - orig.register.run(self) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py deleted file mode 100644 index 1b07362..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py +++ /dev/null @@ -1,61 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsOptionError -import os - -from setuptools import Command -from setuptools.compat import basestring - - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. 
'.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - - for pattern in self.match: - pattern = self.distribution.get_name() + '*' + pattern - files = glob(os.path.join(self.dist_dir, pattern)) - files = [(os.path.getmtime(f), f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t, f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py deleted file mode 100644 index 611cec5..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py +++ /dev/null @@ -1,22 +0,0 @@ -from setuptools.command.setopt import edit_config, option_base - - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - settings = {} - - for cmd in dist.command_options: - - if cmd == 'saveopts': - continue # don't save our own options! - - for opt, (src, val) in dist.get_option_dict(cmd).items(): - if src == "command line": - settings.setdefault(cmd, {})[opt] = val - - edit_config(self.filename, settings, self.dry_run) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py deleted file mode 100644 index 851a177..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py +++ /dev/null @@ -1,197 +0,0 @@ -from glob import glob -from distutils import log -import distutils.command.sdist as orig -import os -import sys - -from setuptools.compat import PY3 -from setuptools.utils import cs_path_exists - -import pkg_resources - -READMES = 'README', 'README.rst', 'README.txt' - -_default_revctrl = list - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - - -class sdist(orig.sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) - self.check_readme() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Call check_metadata only if no 'check' command - # (distutils <= 2.6) - import distutils.command - - if 'check' not in 
distutils.command.__all__: - self.check_metadata() - - self.make_distribution() - - dist_files = getattr(self.distribution, 'dist_files', []) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def __read_template_hack(self): - # This grody hack closes the template file (MANIFEST.in) if an - # exception occurs during read_template. - # Doing so prevents an error when easy_install attempts to delete the - # file. - try: - orig.sdist.read_template(self) - except: - _, _, tb = sys.exc_info() - tb.tb_next.tb_frame.f_locals['template'].close() - raise - - # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle - # has been fixed, so only override the method if we're using an earlier - # Python. - has_leaky_handle = ( - sys.version_info < (2, 7, 2) - or (3, 0) <= sys.version_info < (3, 1, 4) - or (3, 2) <= sys.version_info < (3, 2, 1) - ) - if has_leaky_handle: - read_template = __read_template_hack - - def add_defaults(self): - standards = [READMES, - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if cs_path_exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = list(filter(cs_path_exists, glob(pattern))) - if files: - self.filelist.extend(files) - - # getting python files - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - # This functionality is incompatible with include_package_data, and - # will in fact create an infinite recursion if include_package_data - # is True. 
Use of include_package_data will imply that - # distutils-style automatic handling of package_data is disabled - if not self.distribution.include_package_data: - for _, src_dir, _, filenames in build_py.data_files: - self.filelist.extend([os.path.join(src_dir, filename) - for filename in filenames]) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def check_readme(self): - for f in READMES: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " + - ', '.join(READMES) - ) - - def make_release_tree(self, base_dir, files): - orig.sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os, 'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - def _manifest_is_not_generated(self): - # check for special comment used in 2.7.1 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest, 'rbU') - try: - first_line = fp.readline() - finally: - fp.close() - return (first_line != - '# file GENERATED by distutils, do NOT edit\n'.encode()) - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rbU') - for line in manifest: - # The manifest must contain UTF-8. See #303. - if PY3: - try: - line = line.decode('UTF-8') - except UnicodeDecodeError: - log.warn("%r not UTF-8 decodable -- skipping" % line) - continue - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - manifest.close() diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py deleted file mode 100644 index a04d603..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py +++ /dev/null @@ -1,150 +0,0 @@ -from distutils.util import convert_path -from distutils import log -from distutils.errors import DistutilsOptionError -import distutils -import os - -from setuptools import Command - - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind == 'local': - return 'setup.cfg' - if kind == 'global': - return os.path.join( - os.path.dirname(distutils.__file__), 'distutils.cfg' - ) - if kind == 'user': - dot = os.name == 'posix' and '.' 
or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. - """ - from setuptools.compat import ConfigParser - - log.debug("Reading configuration from %s", filename) - opts = ConfigParser.RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option, value in options.items(): - if value is None: - log.debug( - "Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section, option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section, option, value) - - log.info("Writing %s", filename) - if not dry_run: - with open(filename, 'w') as f: - opts.write(f) - - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames) > 1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - -class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: 
{self.option.replace('-', '_'): self.set_value} - }, - self.dry_run - ) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py deleted file mode 100644 index 42689f7..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py +++ /dev/null @@ -1,175 +0,0 @@ -from distutils.errors import DistutilsOptionError -from unittest import TestLoader -import unittest -import sys - -from pkg_resources import (resource_listdir, resource_exists, normalize_path, - working_set, _namespace_packages, - add_activation_listener, require, EntryPoint) -from setuptools import Command -from setuptools.compat import PY3 -from setuptools.py31compat import unittest_main - - -class ScanningLoader(TestLoader): - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - tests = [] - tests.append(TestLoader.loadTestsFromModule(self, module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file != '__init__.py': - submodule = module.__name__ + '.' + file[:-3] - else: - if resource_exists(module.__name__, file + '/__init__.py'): - submodule = module.__name__ + '.' + file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests) != 1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=', 'm', "Run 'test_suite' in specified module"), - ('test-suite=', 's', - "Test suite to run (e.g. 
'some_module.test_suite')"), - ('test-runner=', 'r', "Test runner to use"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - self.test_runner = None - - def finalize_options(self): - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module + ".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0, '--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution, 'test_loader', None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - if self.test_runner is None: - self.test_runner = getattr(self.distribution, 'test_runner', None) - - def with_project_on_sys_path(self, func): - with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False) - - if with_2to3: - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - self.run_command('build_ext') - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' '.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.with_project_on_sys_path(self.run_tests) - - def run_tests(self): - # Purge modules under test from sys.modules. The test loader will - # re-import them from the build location. Required when 2to3 is used - # with namespace packages. - if PY3 and getattr(self.distribution, 'use_2to3', False): - module = self.test_args[-1].split('.')[0] - if module in _namespace_packages: - del_modules = [] - if module in sys.modules: - del_modules.append(module) - module += '.' - for name in sys.modules: - if name.startswith(module): - del_modules.append(name) - list(map(sys.modules.__delitem__, del_modules)) - - unittest_main( - None, None, [unittest.__file__] + self.test_args, - testLoader=self._resolve_as_ep(self.test_loader), - testRunner=self._resolve_as_ep(self.test_runner), - ) - - @staticmethod - def _resolve_as_ep(val): - """ - Load the indicated attribute value, called, as a as if it were - specified as an entry point. 
- """ - if val is None: - return - parsed = EntryPoint.parse("x=" + val) - return parsed.resolve()() diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py deleted file mode 100644 index 001ee93..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- -"""upload_docs - -Implements a Distutils 'upload_docs' subcommand (upload documentation to -PyPI's pythonhosted.org). -""" - -from base64 import standard_b64encode -from distutils import log -from distutils.errors import DistutilsOptionError -from distutils.command.upload import upload -import os -import socket -import zipfile -import tempfile -import sys -import shutil - -from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 -from pkg_resources import iter_entry_points - - -errors = 'surrogateescape' if PY3 else 'strict' - - -# This is not just a replacement for byte literals -# but works as a general purpose encoder -def b(s, encoding='utf-8'): - if isinstance(s, unicode): - return s.encode(encoding, errors) - return s - - -class upload_docs(upload): - description = 'Upload documentation to PyPI' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('upload-dir=', None, 'directory to upload'), - ] - boolean_options = upload.boolean_options - - def has_sphinx(self): - if self.upload_dir is None: - for ep in iter_entry_points('distutils.commands', 'build_sphinx'): - return True - - sub_commands = [('build_sphinx', has_sphinx)] - - def initialize_options(self): - upload.initialize_options(self) - self.upload_dir = None - self.target_dir = None - - def finalize_options(self): - upload.finalize_options(self) - if self.upload_dir is None: - if self.has_sphinx(): - build_sphinx = self.get_finalized_command('build_sphinx') - self.target_dir = build_sphinx.builder_target_dir - else: - build = self.get_finalized_command('build') - self.target_dir = os.path.join(build.build_base, 'docs') - else: - self.ensure_dirname('upload_dir') - self.target_dir = self.upload_dir - self.announce('Using upload directory %s' % self.target_dir) - - def create_zipfile(self, filename): - zip_file = zipfile.ZipFile(filename, "w") - try: - self.mkpath(self.target_dir) # just in case - for root, dirs, files in os.walk(self.target_dir): - if root == self.target_dir and not files: - raise DistutilsOptionError( - "no files found in upload directory '%s'" - % self.target_dir) - for name in files: - full = os.path.join(root, name) - relative = root[len(self.target_dir):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - finally: - zip_file.close() - - def run(self): - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - tmp_dir = tempfile.mkdtemp() - name = self.distribution.metadata.get_name() - zip_file = os.path.join(tmp_dir, "%s.zip" % name) - try: - self.create_zipfile(zip_file) - self.upload_file(zip_file) - finally: - shutil.rmtree(tmp_dir) - - def upload_file(self, filename): - f = open(filename, 'rb') - content = f.read() - f.close() - meta = self.distribution.metadata - data = { - ':action': 'doc_upload', - 'name': meta.get_name(), - 'content': (os.path.basename(filename), content), - } - # set up 
the authentication - credentials = b(self.username + ':' + self.password) - credentials = standard_b64encode(credentials) - if PY3: - credentials = credentials.decode('ascii') - auth = "Basic " + credentials - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b('\n--') + b(boundary) - end_boundary = sep_boundary + b('--') - body = [] - for key, values in iteritems(data): - title = '\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(values, list): - values = [values] - for value in values: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = b(value) - body.append(sep_boundary) - body.append(b(title)) - body.append(b("\n\n")) - body.append(value) - if value and value[-1:] == b('\r'): - body.append(b('\n')) # write an extra newline (lurve Macs) - body.append(end_boundary) - body.append(b("\n")) - body = b('').join(body) - - self.announce("Submitting documentation to %s" % (self.repository), - log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - conn = httplib.HTTPConnection(netloc) - elif schema == 'https': - conn = httplib.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported schema " + schema) - - data = '' - try: - conn.connect() - conn.putrequest("POST", url) - content_type = 'multipart/form-data; boundary=%s' % boundary - conn.putheader('Content-type', content_type) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - except socket.error as e: - self.announce(str(e), log.ERROR) - return - - r = conn.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 'https://pythonhosted.org/%s/' % meta.get_name() - self.announce('Upload successful. 
Visit %s' % location, - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print('-' * 75, r.read(), '-' * 75) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/compat.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/compat.py deleted file mode 100644 index cddf629..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/compat.py +++ /dev/null @@ -1,71 +0,0 @@ -import sys -import os -import itertools - -IS_WINDOWS = (sys.platform.startswith("win") - or (sys.platform == 'cli' and os.name == 'nt') - or (os.name == 'java' and os._name == 'nt')) - -PY3 = sys.version_info >= (3,) -PY2 = not PY3 - -if PY2: - basestring = basestring - import __builtin__ as builtins - import ConfigParser - from StringIO import StringIO - BytesIO = StringIO - func_code = lambda o: o.func_code - func_globals = lambda o: o.func_globals - im_func = lambda o: o.im_func - from htmlentitydefs import name2codepoint - import httplib - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - from BaseHTTPServer import BaseHTTPRequestHandler - iteritems = lambda o: o.iteritems() - long_type = long - maxsize = sys.maxint - unichr = unichr - unicode = unicode - bytes = str - from urllib import url2pathname, splittag, pathname2url - import urllib2 - from urllib2 import urlopen, HTTPError, URLError, unquote, splituser - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - filterfalse = itertools.ifilterfalse - - exec("""def reraise(tp, value, tb=None): - raise tp, value, tb""") - -if PY3: - basestring = str - import builtins - import configparser as ConfigParser - from io import StringIO, BytesIO - func_code = lambda o: o.__code__ - func_globals = lambda o: o.__globals__ - im_func = lambda o: o.__func__ - from html.entities import name2codepoint - import http.client as httplib - from http.server import HTTPServer, SimpleHTTPRequestHandler - from http.server import BaseHTTPRequestHandler - iteritems = lambda o: o.items() - long_type = int - maxsize = sys.maxsize - unichr = chr - unicode = str - bytes = bytes - from urllib.error import HTTPError, URLError - import urllib.request as urllib2 - from urllib.request import urlopen, url2pathname, pathname2url - from urllib.parse import ( - urlparse, urlunparse, unquote, splituser, urljoin, urlsplit, - urlunsplit, splittag, - ) - filterfalse = itertools.filterfalse - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/depends.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/depends.py deleted file mode 100644 index e87ef3f..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/depends.py +++ /dev/null @@ -1,215 +0,0 @@ -import sys -import imp -import marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion -from setuptools import compat - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self, name, requested_version, module, homepage='', - attribute=None, format=None): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - 
requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) - return self.name - - def version_ok(self, version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version) != "unknown" and version >= self.requested_version - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. - """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module, self.attribute, default, paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - def is_present(self, paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - def is_current(self, paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr<eof: - - op = bytes[ptr] - - if op>=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * compat.long_type(65536) - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix, mode, kind) = find_module(module, paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. 
:( - if module not in sys.modules: - imp.load_module(module, f, path, (suffix, mode, kind)) - return getattr(sys.modules[module], symbol, None) - - finally: - if f: - f.close() - - return extract_constant(code, symbol, default) - - -def extract_constant(code, symbol, default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. - """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default - - -def _update_globals(): - """ - Patch the globals to remove the objects not available on some platforms. - - XXX it'd be better to test assertions about bytecode instead. - """ - - if not sys.platform.startswith('java') and sys.platform != 'cli': - return - incompatible = 'extract_constant', 'get_module_constant' - for name in incompatible: - del globals()[name] - __all__.remove(name) - -_update_globals() diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/dist.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/dist.py deleted file mode 100644 index ffbc7c4..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/dist.py +++ /dev/null @@ -1,861 +0,0 @@ -__all__ = ['Distribution'] - -import re -import os -import sys -import warnings -import numbers -import distutils.log -import distutils.core -import distutils.cmd -import distutils.dist -from distutils.core import Distribution as _Distribution -from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, - DistutilsSetupError) - -from setuptools.depends import Require -from setuptools.compat import basestring, PY2 -from setuptools import windows_support -import pkg_resources - -packaging = pkg_resources.packaging - - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -def _patch_distribution_metadata_write_pkg_info(): - """ - Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local - encoding to save the pkg_info. Monkey-patch its write_pkg_info method to - correct this undesirable behavior. - """ - environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) - if not environment_local: - return - - # from Python 3.4 - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. 
- """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) - - distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info -_patch_distribution_metadata_write_pkg_info() - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - if ':' in k: - k,m = k.split(':',1) - if pkg_resources.invalid_marker(m): - raise DistutilsSetupError("Invalid environment marker: "+m) - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
- ) - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError as e: - raise DistutilsSetupError(e) - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only" - ".-separated package names in setup.py", pkgname - ) - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' **deprecated** -- a dictionary mapping option names to - 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. 
Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. - - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. 
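A hedged sketch of a setup() call exercising the keywords described in the Distribution docstring above; the project name, requirement pins, test suite path, and glob patterns are illustrative only, not taken from this repository:

from setuptools import setup, find_packages

setup(
    name='example-project',                        # illustrative name
    version='1.0',
    packages=find_packages(exclude=['tests*']),
    install_requires=['docutils>=0.3'],
    extras_require={'reST': ['docutils>=0.3', 'reSTedit']},
    test_suite='example_project.tests.test_all',
    package_data={'': ['*.txt'], 'example_project': ['data/*.dat']},
)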
- # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__(self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - _attrs_dict = attrs or {} - if 'features' in _attrs_dict or 'require_features' in _attrs_dict: - Feature.warn_deprecated() - self.require_features = [] - self.features = {} - self.dist_files = [] - self.src_root = attrs and attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs is not None: - self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs['setup_requires']) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, numbers.Number): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - if self.metadata.version is not None: - try: - ver = packaging.version.Version(self.metadata.version) - normalized_version = str(ver) - if self.metadata.version != normalized_version: - warnings.warn( - "Normalizing '%s' to '%s'" % ( - self.metadata.version, - normalized_version, - ) - ) - self.metadata.version = normalized_version - except (packaging.version.InvalidVersion, TypeError): - warnings.warn( - "The version specified (%r) is an invalid version, this " - "may not work as expected with newer versions of " - "setuptools, pip, and PyPI. Please see PEP 440 for more " - "details." 
% self.metadata.version - ) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - resolved_dists = pkg_resources.working_set.resolve( - pkg_resources.parse_requirements(requires), - installer=self.fetch_build_egg, - replace_conflicting=True, - ) - for dist in resolved_dists: - pkg_resources.working_set.add(dist, replace=True) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - if getattr(self, 'convert_2to3_doctests', None): - # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] - else: - self.convert_2to3_doctests = [] - - def get_egg_cache_dir(self): - egg_cache_dir = os.path.join(os.curdir, '.eggs') - if not os.path.exists(egg_cache_dir): - os.mkdir(egg_cache_dir) - windows_support.hide_file(egg_cache_dir) - readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') - with open(readme_txt_filename, 'w') as f: - f.write('This directory contains eggs that were downloaded ' - 'by setuptools to build, test, and run plug-ins.\n\n') - f.write('This directory caches those eggs to prevent ' - 'repeated downloads.\n\n') - f.write('However, it is safe to delete this directory.\n\n') - - return egg_cache_dir - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - - try: - cmd = self._egg_fetcher - cmd.package_index.to_scan = [] - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in list(opts): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - install_dir = self.get_egg_cache_dir() - cmd = easy_install( - dist, args=["x"], install_dir=install_dir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report=True, user=False - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 
'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - # don't require extras as the commands won't be invoked - cmdclass = ep.resolve() - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' 
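A minimal sketch of how a third-party command becomes visible to get_command_class() above: the plugin distribution advertises it in the 'distutils.commands' entry-point group. The distribution, module, and class names here are hypothetical:

from setuptools import setup

setup(
    name='example-plugin',                         # hypothetical plugin
    version='1.0',
    py_modules=['example_plugin_cmds'],
    entry_points={
        'distutils.commands': [
            # "python setup.py mycmd" loads this class on demand
            'mycmd = example_plugin_cmds:MyCommand',
        ],
    },
)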
- if self.packages: - self.packages = [ - p for p in self.packages - if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' - - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - list(map(self.exclude_package, packages)) - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! 
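Illustrative expansion of a setup.cfg alias as performed just below; the alias value is hypothetical:

import shlex

args = ['release']                    # what the user typed
alias = 'sdist bdist_wheel'           # e.g. from an [aliases] section in setup.cfg
args[:1] = shlex.split(alias, True)   # second arg enables comment stripping, as below
# args is now ['sdist', 'bdist_wheel']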
- import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - import sys - - if PY2 or self.help_commands: - return _Distribution.handle_display_options(self, option_order) - - # Stdout may be StringIO (e.g. in tests) - import io - if not isinstance(sys.stdout, io.TextIOWrapper): - return _Distribution.handle_display_options(self, option_order) - - # Don't wrap stdout if utf-8 is already the encoding. Provides - # workaround for #334. - if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): - return _Distribution.handle_display_options(self, option_order) - - # Print metadata in UTF-8 no matter the platform - encoding = sys.stdout.encoding - errors = sys.stdout.errors - newline = sys.platform != 'win32' and '\n' or None - line_buffering = sys.stdout.line_buffering - - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) - try: - return _Distribution.handle_display_options(self, option_order) - finally: - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), encoding, errors, newline, line_buffering) - - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - -class Feature: - """ - **deprecated** -- The `Feature` facility was never completely implemented - or supported, `has reported issues - <https://bitbucket.org/pypa/setuptools/issue/58>`_ and will be removed in - a future version. 
- - A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. - - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - - @staticmethod - def warn_deprecated(): - warnings.warn( - "Features are deprecated and will be removed in a future " - "version. 
See http://bitbucket.org/pypa/setuptools/65.", - DeprecationWarning, - stacklevel=3, - ) - - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras): - self.warn_deprecated() - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. - """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. - """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/extension.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/extension.py deleted file mode 100644 index 8178ed3..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/extension.py +++ /dev/null @@ -1,57 +0,0 @@ -import sys -import re -import functools -import distutils.core -import distutils.errors -import distutils.extension - -from .dist import _get_unpatched -from . import msvc9_support - -_Extension = _get_unpatched(distutils.core.Extension) - -msvc9_support.patch_for_specialized_compiler() - -def have_pyrex(): - """ - Return True if Cython or Pyrex can be imported. 
- """ - pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' - for pyrex_impl in pyrex_impls: - try: - # from (pyrex_impl) import build_ext - __import__(pyrex_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, *args, **kw): - _Extension.__init__(self, *args, **kw) - self._convert_pyx_sources_to_lang() - - def _convert_pyx_sources_to_lang(self): - """ - Replace sources with .pyx extensions to sources with the target - language extension. This mechanism allows language authors to supply - pre-converted sources but to prefer the .pyx sources. - """ - if have_pyrex(): - # the build has Cython, so allow it to compile the .pyx files - return - lang = self.language or '' - target_ext = '.cpp' if lang.lower() == 'c++' else '.c' - sub = functools.partial(re.sub, '.pyx$', target_ext) - self.sources = list(map(sub, self.sources)) - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/lib2to3_ex.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/lib2to3_ex.py deleted file mode 100644 index feef591..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/lib2to3_ex.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Customized Mixin2to3 support: - - - adds support for converting doctests - - -This module raises an ImportError on Python 2. -""" - -from distutils.util import Mixin2to3 as _Mixin2to3 -from distutils import log -from lib2to3.refactor import RefactoringTool, get_fixers_from_package -import setuptools - -class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - -class Mixin2to3(_Mixin2to3): - def run_2to3(self, files, doctests = False): - # See of the distribution option has been set, otherwise check the - # setuptools default. 
- if self.distribution.use_2to3 is not True: - return - if not files: - return - log.info("Fixing "+" ".join(files)) - self.__build_fixer_names() - self.__exclude_fixers() - if doctests: - if setuptools.run_2to3_on_doctests: - r = DistutilsRefactoringTool(self.fixer_names) - r.refactor(files, write=True, doctests_only=True) - else: - _Mixin2to3.run_2to3(self, files) - - def __build_fixer_names(self): - if self.fixer_names: return - self.fixer_names = [] - for p in setuptools.lib2to3_fixer_packages: - self.fixer_names.extend(get_fixers_from_package(p)) - if self.distribution.use_2to3_fixers is not None: - for p in self.distribution.use_2to3_fixers: - self.fixer_names.extend(get_fixers_from_package(p)) - - def __exclude_fixers(self): - excluded_fixers = getattr(self, 'exclude_fixers', []) - if self.distribution.use_2to3_exclude_fixers is not None: - excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) - for fixer_name in excluded_fixers: - if fixer_name in self.fixer_names: - self.fixer_names.remove(fixer_name) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/msvc9_support.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/msvc9_support.py deleted file mode 100644 index a69c747..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/msvc9_support.py +++ /dev/null @@ -1,63 +0,0 @@ -try: - import distutils.msvc9compiler -except ImportError: - pass - -unpatched = dict() - -def patch_for_specialized_compiler(): - """ - Patch functions in distutils.msvc9compiler to use the standalone compiler - build for Python (Windows only). Fall back to original behavior when the - standalone compiler is not available. - """ - if 'distutils' not in globals(): - # The module isn't available to be patched - return - - if unpatched: - # Already patched - return - - unpatched.update(vars(distutils.msvc9compiler)) - - distutils.msvc9compiler.find_vcvarsall = find_vcvarsall - distutils.msvc9compiler.query_vcvarsall = query_vcvarsall - -def find_vcvarsall(version): - Reg = distutils.msvc9compiler.Reg - VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' - key = VC_BASE % ('', version) - try: - # Per-user installs register the compiler path here - productdir = Reg.get_value(key, "installdir") - except KeyError: - try: - # All-user installs on a 64-bit system register here - key = VC_BASE % ('Wow6432Node\\', version) - productdir = Reg.get_value(key, "installdir") - except KeyError: - productdir = None - - if productdir: - import os - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - - return unpatched['find_vcvarsall'](version) - -def query_vcvarsall(version, *args, **kwargs): - try: - return unpatched['query_vcvarsall'](version, *args, **kwargs) - except distutils.errors.DistutilsPlatformError as exc: - if exc and "vcvarsall.bat" in exc.args[0]: - message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0]) - if int(version) == 9: - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. 
- raise distutils.errors.DistutilsPlatformError( - message + ' Get it from http://aka.ms/vcpython27' - ) - raise distutils.errors.DistutilsPlatformError(message) - raise diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/package_index.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/package_index.py deleted file mode 100644 index cabf103..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/package_index.py +++ /dev/null @@ -1,1049 +0,0 @@ -"""PyPI and direct package downloading""" -import sys -import os -import re -import shutil -import socket -import base64 -import hashlib -from functools import wraps - -from pkg_resources import ( - CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, - require, Environment, find_distributions, safe_name, safe_version, - to_filename, Requirement, DEVELOP_DIST, -) -from setuptools import ssl_support -from distutils import log -from distutils.errors import DistutilsError -from setuptools.compat import (urllib2, httplib, StringIO, HTTPError, - urlparse, urlunparse, unquote, splituser, - url2pathname, name2codepoint, - unichr, urljoin, urlsplit, urlunsplit, - ConfigParser) -from setuptools.compat import filterfalse -from fnmatch import translate -from setuptools.py26compat import strip_fragment -from setuptools.py27compat import get_all_headers - -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -# this is here to fix emacs' cruddy broken syntax highlighting -PYPI_MD5 = re.compile( - '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)' - 'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)' -) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() - -__all__ = [ - 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', - 'interpret_distro_name', -] - -_SOCKET_TIMEOUT = 15 - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver, plat = None, None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - plat = 'win32' - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - plat = 'win32' - elif lower.endswith('.win-amd64.exe'): - base = name[:-14] - plat = 'win-amd64' - elif lower.startswith('.win-amd64-py',-20): - py_ver = name[-7:-4] - base = name[:-20] - plat = 'win-amd64' - return base,py_ver,plat - - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse(url) - base = unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck - base = unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist - if fragment: - match = EGG_FRAGMENT.match(fragment) - if match: - for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ): - yield dist - -def distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') 
and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - if basename.endswith('.exe'): - win_base, py_ver, platform = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, platform - ) - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name( - location, basename, metadata, py_version=None, precedence=SOURCE_DIST, - platform=None - ): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). - - parts = basename.split('-') - if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]): - # it is a bdist_dumb, not an sdist -- bail out - return - - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - -# From Python 2.7 docs -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - -def unique_values(func): - """ - Wrap a function returning an iterable such that the resulting iterable - only ever yields unique items. 
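For reference, the alternative name/version splits that interpret_distro_name (above) generates for an ambiguous sdist basename can be reproduced in isolation; the basename is the same example its comment cites:

basename = 'adns-python-1.1.0'
parts = basename.split('-')
splits = [('-'.join(parts[:p]), '-'.join(parts[p:]))
          for p in range(1, len(parts) + 1)]
# splits == [('adns', 'python-1.1.0'),
#            ('adns-python', '1.1.0'),
#            ('adns-python-1.1.0', '')]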
- """ - @wraps(func) - def wrapper(*args, **kwargs): - return unique_everseen(func(*args, **kwargs)) - return wrapper - -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) -# this line is here to fix emacs' cruddy broken syntax highlighting - -@unique_values -def find_external_links(url, page): - """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" - - for match in REL.finditer(page): - tag, rel = match.groups() - rels = set(map(str.strip, rel.lower().split(','))) - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - yield urljoin(url, htmldecode(match.group(1))) - - for tag in ("<th>Home Page", "<th>Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - yield urljoin(url, htmldecode(match.group(1))) - -user_agent = "Python-urllib/%s setuptools/%s" % ( - sys.version[:3], require('setuptools')[0].version -) - -class ContentChecker(object): - """ - A null content checker that defines the interface for checking content - """ - def feed(self, block): - """ - Feed a block of data to the hash. - """ - return - - def is_valid(self): - """ - Check the hash. Return False if validation fails. - """ - return True - - def report(self, reporter, template): - """ - Call reporter with information about the checker (hash name) - substituted into the template. - """ - return - -class HashChecker(ContentChecker): - pattern = re.compile( - r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' - r'(?P<expected>[a-f0-9]+)' - ) - - def __init__(self, hash_name, expected): - self.hash_name = hash_name - self.hash = hashlib.new(hash_name) - self.expected = expected - - @classmethod - def from_url(cls, url): - "Construct a (possibly null) ContentChecker from a URL" - fragment = urlparse(url)[-1] - if not fragment: - return ContentChecker() - match = cls.pattern.search(fragment) - if not match: - return ContentChecker() - return cls(**match.groupdict()) - - def feed(self, block): - self.hash.update(block) - - def is_valid(self): - return self.hash.hexdigest() == self.expected - - def report(self, reporter, template): - msg = template % self.hash_name - return reporter(msg) - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__( - self, index_url="https://pypi.python.org/simple", hosts=('*',), - ca_bundle=None, verify_ssl=True, *args, **kw - ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): - self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib2.urlopen - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - list(map(self.add, dists)) - return # don't need the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - self.fetched_urls[url] = True # 
prevent multiple fetch attempts - f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) - if f is None: return - self.fetched_urls[f.url] = True - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. - if isinstance(f, HTTPError): - # Errors have no charset, assume latin1: - charset = 'latin-1' - else: - charset = f.headers.get_param('charset') or 'latin-1' - page = page.decode(charset, "ignore") - f.close() - for match in HREF.finditer(page): - link = urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: - page = self.process_index(url, page) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - list(map(self.add, dists)) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]): - return True - msg = ("\nNote: Bypassing %s (disallowed host; see " - "http://bit.ly/1dg9ijs for details).\n") - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) - - def scan_egg_link(self, path, entry): - lines = [_f for _f in map(str.strip, - open(os.path.join(path, entry))) if _f] - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = list(map( - unquote, link[len(self.index_url):].split('/') - )) - if len(parts)==2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - # process an index page into the package-page index - for match in HREF.finditer(page): - try: - scan(urljoin(url, htmldecode(match.group(1)))) - except ValueError: - pass - - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page - ) - else: - return "" # no sense double-scanning non-package pages - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, 
*args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.info( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.not_found_in_index(requirement) - - for url in list(self.package_pages.get(requirement.key,())): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan() - self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - def check_hash(self, checker, filename, tfp): - """ - checker is a ContentChecker - """ - checker.report(self.debug, - "Validating %%s checksum for %s" % filename) - if not checker.is_valid(): - tfp.close() - os.unlink(filename) - raise DistutilsError( - "%s validation failed for %s; " - "possible download problem?" % ( - checker.hash.name, os.path.basename(filename)) - ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. --find-links)""" - if self.to_scan: - list(map(self.scan_url, self.to_scan)) - self.to_scan = None # from now on, go ahead and process immediately - - def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro - meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled - meth, msg = (self.warn, - "Couldn't find index page for %r (maybe misspelled?)") - meth(msg, requirement.unsafe_name) - self.scan_all() - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. 
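Aside: the download() docstring above describes a three-way dispatch: a URL is fetched into tmpdir, an existing local path is returned unchanged, and anything else is parsed as a project/version requirement spec. A rough, standard-library-only sketch of that ordering (classify_spec is a hypothetical helper, not the setuptools implementation; the URL_SCHEME regex is the one defined earlier in this module):

import os
import re

URL_SCHEME = re.compile(r'([-+.a-z0-9]{2,}):', re.I).match

def classify_spec(spec):
    # Mirror the ordering used by PackageIndex.download(): URL first,
    # then an existing file/directory, then a requirement spec.
    if URL_SCHEME(spec):
        return 'url'
    if os.path.exists(spec):
        return 'local path'
    return 'requirement spec'

for spec in ('https://example.org/pkg-1.0.tar.gz', os.curdir, 'example-pkg>=1.0'):
    print(spec, '->', classify_spec(spec))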
- """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - def fetch_distribution( - self, requirement, tmpdir, force_scan=False, source=False, - develop_ok=False, local_index=None - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. - """ - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(req, env=None): - if env is None: - env = self - # Find a matching distribution; may be called more than once - - for dist in env[req.key]: - - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - return dist - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(requirement) - - if local_index is not None: - dist = dist or find(requirement, local_index) - - if dist is None: - if self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - else: - self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. 
- """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [ - d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." - ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp, info = None, None - try: - checker = HashChecker.from_url(url) - fp = self.open_url(strip_fragment(url)) - if isinstance(fp, HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - # Some servers return multiple Content-Length headers :( - sizes = get_all_headers(headers, 'Content-Length') - size = max(map(int, sizes)) - self.reporthook(url, filename, blocknum, bs, size) - with open(filename,'wb') as tfp: - while True: - block = fp.read(bs) - if block: - checker.feed(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - self.check_hash(checker, filename, tfp) - return headers - finally: - if fp: fp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - def open_url(self, url, warning=None): - if url.startswith('file:'): - return local_open(url) - try: - return open_with_auth(url, self.opener) - except (ValueError, httplib.InvalidURL) as v: - msg = ' '.join([str(arg) for arg in v.args]) - if warning: - self.warn(warning, msg) - else: - raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError as v: - return v - except urllib2.URLError as v: - if warning: - self.warn(warning, v.reason) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) - except httplib.BadStatusLine as v: - if warning: - self.warn(warning, v.line) - else: - raise DistutilsError( - '%s returned a bad status line. The server might be ' - 'down, %s' % - (url, v.line) - ) - except httplib.HTTPException as v: - if warning: - self.warn(warning, v) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v)) - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name, fragment = egg_info_for_url(url) - if name: - while '..' 
in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme=='git' or scheme.startswith('git+'): - return self._download_git(url, filename) - elif scheme.startswith('hg+'): - return self._download_hg(url, filename) - elif scheme=='file': - return url2pathname(urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - def scan_url(self, url): - self.process_url(url, True) - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - creds = '' - if url.lower().startswith('svn:') and '@' in url: - scheme, netloc, path, p, q, f = urlparse(url) - if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/',1) - auth, host = splituser(netloc) - if auth: - if ':' in auth: - user, pw = auth.split(':',1) - creds = " --username=%s --password=%s" % (user, pw) - else: - creds = " --username="+auth - netloc = host - url = urlunparse((scheme, netloc, url, p, q, f)) - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout%s -q %s %s" % (creds, url, filename)) - return filename - - @staticmethod - def _vcs_split_rev_from_url(url, pop_prefix=False): - scheme, netloc, path, query, frag = urlsplit(url) - - scheme = scheme.split('+', 1)[-1] - - # Some fragment identification fails - path = path.split('#',1)[0] - - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - - # Also, discard fragment - url = urlunsplit((scheme, netloc, path, query, '')) - - return url, rev - - def _download_git(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing git clone from %s to %s", url, filename) - os.system("git clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Checking out %s", rev) - os.system("(cd %s && git checkout --quiet %s)" % ( - filename, - rev, - )) - - return filename - - def _download_hg(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing hg clone from %s to %s", url, filename) - os.system("hg clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Updating to %s", rev) - os.system("(cd %s && hg up -C -r %s >&-)" % ( - filename, - rev, - )) - - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def 
warn(self, msg, *args): - log.warn(msg, *args) - -# This pattern matches a character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). -entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - -def uchr(c): - if not isinstance(c, int): - return c - if c>255: return unichr(c) - return chr(c) - -def decode_entity(match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - what = name2codepoint.get(what, match.group(0)) - return uchr(what) - -def htmldecode(text): - """Decode HTML entities in the given text.""" - return entity_sub(decode_entity, text) - -def socket_timeout(timeout=15): - def _socket_timeout(func): - def _socket_timeout(*args, **kwargs): - old_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(timeout) - try: - return func(*args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - return _socket_timeout - return _socket_timeout - -def _encode_auth(auth): - """ - A function compatible with Python 2.3-3.3 that will encode - auth from a URL suitable for an HTTP header. - >>> str(_encode_auth('username%3Apassword')) - 'dXNlcm5hbWU6cGFzc3dvcmQ=' - - Long auth strings should not cause a newline to be inserted. - >>> long_auth = 'username:' + 'password'*10 - >>> chr(10) in str(_encode_auth(long_auth)) - False - """ - auth_s = unquote(auth) - # convert to bytes - auth_bytes = auth_s.encode() - # use the legacy interface for Python 2.3 support - encoded_bytes = base64.encodestring(auth_bytes) - # convert back to a string - encoded = encoded_bytes.decode() - # strip the trailing carriage return - return encoded.replace('\n','') - -class Credential(object): - """ - A username/password pair. Use like a namedtuple. - """ - def __init__(self, username, password): - self.username = username - self.password = password - - def __iter__(self): - yield self.username - yield self.password - - def __str__(self): - return '%(username)s:%(password)s' % vars(self) - -class PyPIConfig(ConfigParser.ConfigParser): - - def __init__(self): - """ - Load from ~/.pypirc - """ - defaults = dict.fromkeys(['username', 'password', 'repository'], '') - ConfigParser.ConfigParser.__init__(self, defaults) - - rc = os.path.join(os.path.expanduser('~'), '.pypirc') - if os.path.exists(rc): - self.read(rc) - - @property - def creds_by_repository(self): - sections_with_repositories = [ - section for section in self.sections() - if self.get(section, 'repository').strip() - ] - - return dict(map(self._get_repo_cred, sections_with_repositories)) - - def _get_repo_cred(self, section): - repo = self.get(section, 'repository').strip() - return repo, Credential( - self.get(section, 'username').strip(), - self.get(section, 'password').strip(), - ) - - def find_credential(self, url): - """ - If the URL indicated appears to be a repository defined in this - config, return the credential for that repository. - """ - for repository, cred in self.creds_by_repository.items(): - if url.startswith(repository): - return cred - - -def open_with_auth(url, opener=urllib2.urlopen): - """Open a urllib2 request, handling HTTP authentication""" - - scheme, netloc, path, params, query, frag = urlparse(url) - - # Double scheme does not raise on Mac OS X as revealed by a - # failing test. We would expect "nonnumeric port". Refs #20. 
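Aside: open_with_auth() below builds a Basic Authorization header from a user:password pair taken from the URL or from ~/.pypirc, using _encode_auth() to base64-encode it without the newlines that the legacy encodestring API inserts. On Python 3 the same header can be built with base64.b64encode, which never wraps its output; a small sketch with made-up credentials, matching the doctest shown above:

import base64
from urllib.parse import unquote

def encode_auth(auth):
    # Percent-decode, then base64-encode; b64encode does not insert newlines.
    return base64.b64encode(unquote(auth).encode()).decode()

header = "Basic " + encode_auth("username%3Apassword")
print(header)   # Basic dXNlcm5hbWU6cGFzc3dvcmQ=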
- if netloc.endswith(':'): - raise httplib.InvalidURL("nonnumeric port: ''") - - if scheme in ('http', 'https'): - auth, host = splituser(netloc) - else: - auth = None - - if not auth: - cred = PyPIConfig().find_credential(url) - if cred: - auth = str(cred) - info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)' % info) - - if auth: - auth = "Basic " + _encode_auth(auth) - new_url = urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib2.Request(url) - - request.add_header('User-Agent', user_agent) - fp = opener(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse(fp.url) - if s2==scheme and h2==host: - fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2)) - - return fp - -# adding a timeout to avoid freezing package_index -open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) - - -def fix_sf_url(url): - return url # backward compatibility - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse(url) - filename = url2pathname(path) - if os.path.isfile(filename): - return urllib2.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - if f=='index.html': - with open(os.path.join(filename,f),'r') as fp: - body = fp.read() - break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) - else: - body = ("<html><head><title>%s</title>" % url) + \ - "</head><body>%s</body></html>" % '\n'.join(files) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - headers = {'content-type': 'text/html'} - return HTTPError(url, status, message, headers, StringIO(body)) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py26compat.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py26compat.py deleted file mode 100644 index 738b0cc..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py26compat.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Compatibility Support for Python 2.6 and earlier -""" - -import sys - -from setuptools.compat import splittag - -def strip_fragment(url): - """ - In `Python 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and - later was patched to disregard the fragment when making URL requests. - Do the same for Python 2.6 and earlier. - """ - url, fragment = splittag(url) - return url - -if sys.version_info >= (2,7): - strip_fragment = lambda x: x diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py27compat.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py27compat.py deleted file mode 100644 index 9d2886d..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py27compat.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Compatibility Support for Python 2.7 and earlier -""" - -import sys - -def get_all_headers(message, key): - """ - Given an HTTPMessage, return all headers matching a given key. 
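Aside: get_all_headers() exists because _download_to() has to cope with servers that send more than one Content-Length header; it returns every matching value so the caller can take the maximum. A quick demonstration with the standard email.message API used by the Python 3 branch (the header values here are invented):

from email.message import Message

headers = Message()
headers['Content-Length'] = '1024'
headers['Content-Length'] = '2048'   # a duplicate header, as some servers send

sizes = headers.get_all('Content-Length')
print(sizes)                  # ['1024', '2048']
print(max(map(int, sizes)))   # 2048 -- the value _download_to() would use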
- """ - return message.get_all(key) - -if sys.version_info < (3,): - def get_all_headers(message, key): - return message.getheaders(key) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py31compat.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py31compat.py deleted file mode 100644 index c487ac0..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/py31compat.py +++ /dev/null @@ -1,52 +0,0 @@ -import sys -import unittest - -__all__ = ['get_config_vars', 'get_path'] - -try: - # Python 2.7 or >=3.2 - from sysconfig import get_config_vars, get_path -except ImportError: - from distutils.sysconfig import get_config_vars, get_python_lib - def get_path(name): - if name not in ('platlib', 'purelib'): - raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') - -try: - # Python >=3.2 - from tempfile import TemporaryDirectory -except ImportError: - import shutil - import tempfile - class TemporaryDirectory(object): - """" - Very simple temporary directory context manager. - Will try to delete afterward, but will also ignore OS and similar - errors on deletion. - """ - def __init__(self): - self.name = None # Handle mkdtemp raising an exception - self.name = tempfile.mkdtemp() - - def __enter__(self): - return self.name - - def __exit__(self, exctype, excvalue, exctrace): - try: - shutil.rmtree(self.name, True) - except OSError: #removal errors are not the only possible - pass - self.name = None - - -unittest_main = unittest.main - -_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2) -if _PY31: - # on Python 3.1, translate testRunner==None to TextTestRunner - # for compatibility with Python 2.6, 2.7, and 3.2+ - def unittest_main(*args, **kwargs): - if 'testRunner' in kwargs and kwargs['testRunner'] is None: - kwargs['testRunner'] = unittest.TextTestRunner - return unittest.main(*args, **kwargs) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/sandbox.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/sandbox.py deleted file mode 100644 index 6725512..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/sandbox.py +++ /dev/null @@ -1,489 +0,0 @@ -import os -import sys -import tempfile -import operator -import functools -import itertools -import re -import contextlib -import pickle - -import pkg_resources - -if os.name == "java": - import org.python.modules.posix.PosixModule as _os -else: - _os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open -from distutils.errors import DistutilsError -from pkg_resources import working_set - -from setuptools import compat -from setuptools.compat import builtins - -__all__ = [ - "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", -] - -def _execfile(filename, globals, locals=None): - """ - Python 3 implementation of execfile. - """ - mode = 'rb' - # Python 2.6 compile requires LF for newlines, so use deprecated - # Universal newlines support. 
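Aside: _execfile() below is the sandbox's Python 3 stand-in for the removed execfile() builtin: read the script bytes, compile them with the real filename (so tracebacks point at setup.py), and exec into a supplied namespace. A minimal usage sketch with a throwaway script; the file contents and the execfile3 name are invented for illustration:

import os
import tempfile

def execfile3(filename, globals, locals=None):
    # Read, compile with the real filename for useful tracebacks, then exec.
    with open(filename, 'rb') as stream:
        script = stream.read()
    if locals is None:
        locals = globals
    exec(compile(script, filename, 'exec'), globals, locals)

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
    f.write("answer = 6 * 7\n")

ns = {'__file__': f.name, '__name__': '__main__'}
execfile3(f.name, ns)
print(ns['answer'])   # 42
os.unlink(f.name)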
- if sys.version_info < (2, 7): - mode += 'U' - with open(filename, mode) as stream: - script = stream.read() - if locals is None: - locals = globals - code = compile(script, filename, 'exec') - exec(code, globals, locals) - - -@contextlib.contextmanager -def save_argv(repl=None): - saved = sys.argv[:] - if repl is not None: - sys.argv[:] = repl - try: - yield saved - finally: - sys.argv[:] = saved - - -@contextlib.contextmanager -def save_path(): - saved = sys.path[:] - try: - yield saved - finally: - sys.path[:] = saved - - -@contextlib.contextmanager -def override_temp(replacement): - """ - Monkey-patch tempfile.tempdir with replacement, ensuring it exists - """ - if not os.path.isdir(replacement): - os.makedirs(replacement) - - saved = tempfile.tempdir - - tempfile.tempdir = replacement - - try: - yield - finally: - tempfile.tempdir = saved - - -@contextlib.contextmanager -def pushd(target): - saved = os.getcwd() - os.chdir(target) - try: - yield saved - finally: - os.chdir(saved) - - -class UnpickleableException(Exception): - """ - An exception representing another Exception that could not be pickled. - """ - @classmethod - def dump(cls, type, exc): - """ - Always return a dumped (pickled) type and exc. If exc can't be pickled, - wrap it in UnpickleableException first. - """ - try: - return pickle.dumps(type), pickle.dumps(exc) - except Exception: - return cls.dump(cls, cls(repr(exc))) - - -class ExceptionSaver: - """ - A Context Manager that will save an exception, serialized, and restore it - later. - """ - def __enter__(self): - return self - - def __exit__(self, type, exc, tb): - if not exc: - return - - # dump the exception - self._saved = UnpickleableException.dump(type, exc) - self._tb = tb - - # suppress the exception - return True - - def resume(self): - "restore and re-raise any exception" - - if '_saved' not in vars(self): - return - - type, exc = map(pickle.loads, self._saved) - compat.reraise(type, exc, self._tb) - - -@contextlib.contextmanager -def save_modules(): - """ - Context in which imported modules are saved. - - Translates exceptions internal to the context into the equivalent exception - outside the context. - """ - saved = sys.modules.copy() - with ExceptionSaver() as saved_exc: - yield saved - - sys.modules.update(saved) - # remove any modules imported since - del_modules = ( - mod_name for mod_name in sys.modules - if mod_name not in saved - # exclude any encodings modules. 
See #285 - and not mod_name.startswith('encodings.') - ) - _clear_modules(del_modules) - - saved_exc.resume() - - -def _clear_modules(module_names): - for mod_name in list(module_names): - del sys.modules[mod_name] - - -@contextlib.contextmanager -def save_pkg_resources_state(): - saved = pkg_resources.__getstate__() - try: - yield saved - finally: - pkg_resources.__setstate__(saved) - - -@contextlib.contextmanager -def setup_context(setup_dir): - temp_dir = os.path.join(setup_dir, 'temp') - with save_pkg_resources_state(): - with save_modules(): - hide_setuptools() - with save_path(): - with save_argv(): - with override_temp(temp_dir): - with pushd(setup_dir): - # ensure setuptools commands are available - __import__('setuptools') - yield - - -def _needs_hiding(mod_name): - """ - >>> _needs_hiding('setuptools') - True - >>> _needs_hiding('pkg_resources') - True - >>> _needs_hiding('setuptools_plugin') - False - >>> _needs_hiding('setuptools.__init__') - True - >>> _needs_hiding('distutils') - True - """ - pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)') - return bool(pattern.match(mod_name)) - - -def hide_setuptools(): - """ - Remove references to setuptools' modules from sys.modules to allow the - invocation to import the most appropriate setuptools. This technique is - necessary to avoid issues such as #315 where setuptools upgrading itself - would fail to find a function declared in the metadata. - """ - modules = filter(_needs_hiding, sys.modules) - _clear_modules(modules) - - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - with setup_context(setup_dir): - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) - def runner(): - ns = dict(__file__=setup_script, __name__='__main__') - _execfile(setup_script, ns) - DirectorySandbox(setup_dir).run(runner) - except SystemExit as v: - if v.args and v.args[0]: - raise - # Normal exit, just return - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True - return func() - finally: - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in 
[ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull,] -else: - _EXCEPTIONS = [] - -try: - from win32com.client.gencache import GetGeneratePath - _EXCEPTIONS.append(GetGeneratePath()) - del GetGeneratePath -except ImportError: - # it appears pywin32 is not installed, so no need to exclude. 
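Aside: AbstractSandbox virtualizes the os module by generating wrappers for whole families of functions: each wrapper remaps or validates its path argument(s) while the sandbox is active, then calls through to the real implementation. A toy version of the same wrapper-factory idea, restricted to one function and a hypothetical prefix check (not the DirectorySandbox logic itself):

import os

def make_guarded(name, allowed_prefix, original=None):
    # Build a wrapper for os.<name> that refuses paths outside allowed_prefix.
    original = original or getattr(os, name)
    def wrap(path, *args, **kw):
        real = os.path.realpath(path)
        if not real.startswith(os.path.realpath(allowed_prefix) + os.sep):
            raise RuntimeError("%s() blocked outside sandbox: %r" % (name, path))
        return original(path, *args, **kw)
    return wrap

guarded_remove = make_guarded('remove', '/tmp')
# guarded_remove('/etc/passwd')  # would raise RuntimeError instead of deleting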
- pass - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - _exception_patterns = [ - # Allow lib2to3 to attempt to save a pickled grammar object (#121) - '.*lib2to3.*\.pickle$', - ] - "exempt writing to paths that match the pattern" - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - self._exceptions = [ - os.path.normcase(os.path.realpath(path)) - for path in exceptions - ] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - if _file: - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self, path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - return ( - self._exempted(realpath) - or realpath == self._sandbox - or realpath.startswith(self._prefix) - ) - finally: - self._active = active - - def _exempted(self, filepath): - start_matches = ( - filepath.startswith(exception) - for exception in self._exceptions - ) - pattern_matches = ( - re.match(pattern, filepath) - for pattern in self._exception_patterns - ) - candidates = itertools.chain(start_matches, pattern_matches) - return any(candidates) - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - def open(self, file, flags, mode=0o777, *args, **kw): - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file,flags,mode, *args, **kw) - -WRITE_FLAGS = functools.reduce( - operator.or_, [getattr(_os, a, 0) for a in - "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] -) - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. 
Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script (dev).tmpl b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script (dev).tmpl deleted file mode 100644 index d58b1bb..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script (dev).tmpl +++ /dev/null @@ -1,5 +0,0 @@ -# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r -__requires__ = %(spec)r -__import__('pkg_resources').require(%(spec)r) -__file__ = %(dev_path)r -exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script.tmpl b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script.tmpl deleted file mode 100644 index ff5efbc..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/script.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r -__requires__ = %(spec)r -__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/site-patch.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/site-patch.py deleted file mode 100644 index c216801..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/site-patch.py +++ /dev/null @@ -1,76 +0,0 @@ -def __boot(): - import sys - import os - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. 
on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - import imp # Avoid import loop in Python >= 3.3 - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d, nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p, np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/ssl_support.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/ssl_support.py deleted file mode 100644 index cc7db06..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/ssl_support.py +++ /dev/null @@ -1,241 +0,0 @@ -import os -import socket -import atexit -import re - -import pkg_resources -from pkg_resources import ResolutionError, ExtractionError -from setuptools.compat import urllib2 - -try: - import ssl -except ImportError: - ssl = None - -__all__ = [ - 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', - 'opener_for' -] - -cert_paths = """ -/etc/pki/tls/certs/ca-bundle.crt -/etc/ssl/certs/ca-certificates.crt -/usr/share/ssl/certs/ca-bundle.crt -/usr/local/share/certs/ca-root.crt -/etc/ssl/cert.pem -/System/Library/OpenSSL/certs/cert.pem -""".strip().split() - - -HTTPSHandler = HTTPSConnection = object - -for what, where in ( - ('HTTPSHandler', ['urllib2','urllib.request']), - ('HTTPSConnection', ['httplib', 'http.client']), -): - for module in where: - try: - exec("from %s import %s" % (module, what)) - except ImportError: - pass - -is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) - - -try: - from ssl import CertificateError, match_hostname -except ImportError: - try: - from backports.ssl_match_hostname import CertificateError - from backports.ssl_match_hostname import match_hostname - except ImportError: - CertificateError = None - match_hostname = None - -if not CertificateError: - class CertificateError(ValueError): - pass - -if not match_hostname: - def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - # Ported from python3-syntax: - # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') - leftmost = parts[0] - remainder = parts[1:] - - wildcards = leftmost.count('*') - if wildcards 
> max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. 
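Aside: _dnsname_match() backports RFC 6125 host-name matching for Pythons whose ssl module lacks it: the left-most label may be a wildcard, but it matches only a single non-empty label, so '*.example.com' covers 'www.example.com' but not 'a.b.example.com'. A stripped-down sketch of just that wildcard rule (simplified; it ignores the internationalized xn-- cases handled above):

import re

def wildcard_match(pattern, hostname):
    # Split off the left-most label; only it may carry the wildcard.
    leftmost, _, remainder = pattern.partition('.')
    if leftmost == '*':
        pats = ['[^.]+']                                     # one non-empty, dotless label
    else:
        pats = [re.escape(leftmost).replace(r'\*', '[^.]*')]
    if remainder:
        pats += [re.escape(frag) for frag in remainder.split('.')]
    regex = r'\A' + r'\.'.join(pats) + r'\Z'
    return re.match(regex, hostname, re.IGNORECASE) is not None

print(wildcard_match('*.example.com', 'www.example.com'))    # True
print(wildcard_match('*.example.com', 'a.b.example.com'))    # False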
- if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") - - -class VerifyingHTTPSHandler(HTTPSHandler): - """Simple verifying handler: no auth, subclasses, timeouts, etc.""" - - def __init__(self, ca_bundle): - self.ca_bundle = ca_bundle - HTTPSHandler.__init__(self) - - def https_open(self, req): - return self.do_open( - lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req - ) - - -class VerifyingHTTPSConn(HTTPSConnection): - """Simple verifying connection: no auth, subclasses, timeouts, etc.""" - def __init__(self, host, ca_bundle, **kw): - HTTPSConnection.__init__(self, host, **kw) - self.ca_bundle = ca_bundle - - def connect(self): - sock = socket.create_connection( - (self.host, self.port), getattr(self, 'source_address', None) - ) - - # Handle the socket if a (proxy) tunnel is present - if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): - self.sock = sock - self._tunnel() - # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 - # change self.host to mean the proxy server host when tunneling is - # being used. Adapt, since we are interested in the destination - # host for the match_hostname() comparison. - actual_host = self._tunnel_host - else: - actual_host = self.host - - self.sock = ssl.wrap_socket( - sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle - ) - try: - match_hostname(self.sock.getpeercert(), actual_host) - except CertificateError: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - -def opener_for(ca_bundle=None): - """Get a urlopen() replacement that uses ca_bundle for verification""" - return urllib2.build_opener( - VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) - ).open - - -_wincerts = None - -def get_win_certfile(): - global _wincerts - if _wincerts is not None: - return _wincerts.name - - try: - from wincertstore import CertFile - except ImportError: - return None - - class MyCertFile(CertFile): - def __init__(self, stores=(), certs=()): - CertFile.__init__(self) - for store in stores: - self.addstore(store) - self.addcerts(certs) - atexit.register(self.close) - - _wincerts = MyCertFile(stores=['CA', 'ROOT']) - return _wincerts.name - - -def find_ca_bundle(): - """Return an existing CA bundle path, or None""" - if os.name=='nt': - return get_win_certfile() - else: - for cert_path in cert_paths: - if os.path.isfile(cert_path): - return cert_path - try: - return pkg_resources.resource_filename('certifi', 'cacert.pem') - except (ImportError, ResolutionError, ExtractionError): - return None diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/__init__.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/__init__.py deleted file mode 100644 index b8a29cb..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/__init__.py +++ /dev/null @@ -1,323 +0,0 @@ -"""Tests for the 'setuptools' package""" -import sys -import os -import distutils.core -import distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -from distutils.core 
import Extension -from distutils.version import LooseVersion -from setuptools.compat import func_code - -import pytest - -import setuptools.dist -import setuptools.depends as dep -from setuptools import Feature -from setuptools.depends import Require - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! - args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core._setup_stop_after = None - - -needs_bytecode = pytest.mark.skipif( - not hasattr(dep, 'get_module_constant'), - reason="bytecode support not available", -) - -class TestDepends: - - def testExtractConst(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platforms - return - - def f1(): - global x, y, z - x = "test" - y = z - - fc = func_code(f1) - - # unrecognized name - assert dep.extract_constant(fc,'q', -1) is None - - # constant assigned - dep.extract_constant(fc,'x', -1) == "test" - - # expression assigned - dep.extract_constant(fc,'y', -1) == -1 - - # recognized name, not assigned - dep.extract_constant(fc,'z', -1) is None - - def testFindModule(self): - with pytest.raises(ImportError): - dep.find_module('no-such.-thing') - with pytest.raises(ImportError): - dep.find_module('setuptools.non-existent') - f,p,i = dep.find_module('setuptools.tests') - f.close() - - @needs_bytecode - def testModuleExtract(self): - from email import __version__ - assert dep.get_module_constant('email','__version__') == __version__ - assert dep.get_module_constant('sys','version') == sys.version - assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__ - - @needs_bytecode - def testRequire(self): - req = Require('Email','1.0.3','email') - - assert req.name == 'Email' - assert req.module == 'email' - assert req.requested_version == '1.0.3' - assert req.attribute == '__version__' - assert req.full_name() == 'Email-1.0.3' - - from email import __version__ - assert req.get_version() == __version__ - assert req.version_ok('1.0.9') - assert not req.version_ok('0.9.1') - assert not req.version_ok('unknown') - - assert req.is_present() - assert req.is_current() - - req = Require('Email 3000','03000','email',format=LooseVersion) - assert req.is_present() - assert not req.is_current() - assert not req.version_ok('unknown') - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - assert not req.is_present() - assert not req.is_current() - - req = Require('Tests', None, 'tests', homepage="http://example.com") - assert req.format is None - assert req.attribute is None - assert req.requested_version is None - assert req.full_name() == 'Tests' - assert req.homepage == 'http://example.com' - - paths = [os.path.dirname(p) for p in __path__] - assert req.is_present(paths) - assert req.is_current(paths) - - -class TestDistro: - - def setup_method(self, method): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - def testDistroType(self): - assert isinstance(self.dist,setuptools.dist.Distribution) - - def testExcludePackage(self): - self.dist.exclude_package('a') - assert self.dist.packages == ['b','c'] - - self.dist.exclude_package('b') - assert self.dist.packages == ['c'] - assert self.dist.py_modules == ['x'] - assert 
self.dist.ext_modules == [self.e1, self.e2] - - self.dist.exclude_package('c') - assert self.dist.packages == [] - assert self.dist.py_modules == ['x'] - assert self.dist.ext_modules == [self.e1] - - # test removals from unspecified options - makeSetup().exclude_package('x') - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - assert self.dist.ext_modules == [self.e2] - - # add it back in - self.dist.include(ext_modules=[self.e1]) - assert self.dist.ext_modules == [self.e2, self.e1] - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - assert self.dist.ext_modules == [self.e2, self.e1] - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - assert self.dist.packages == [] - assert self.dist.py_modules == ['x'] - assert self.dist.ext_modules == [self.e1] - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - assert self.dist.has_contents_for('a') - self.dist.exclude_package('a') - assert not self.dist.has_contents_for('a') - - assert self.dist.has_contents_for('b') - self.dist.exclude_package('b') - assert not self.dist.has_contents_for('b') - - assert self.dist.has_contents_for('c') - self.dist.exclude_package('c') - assert not self.dist.has_contents_for('c') - - def testInvalidIncludeExclude(self): - with pytest.raises(DistutilsSetupError): - self.dist.include(nonexistent_option='x') - with pytest.raises(DistutilsSetupError): - self.dist.exclude(nonexistent_option='x') - with pytest.raises(DistutilsSetupError): - self.dist.include(packages={'x':'y'}) - with pytest.raises(DistutilsSetupError): - self.dist.exclude(packages={'x':'y'}) - with pytest.raises(DistutilsSetupError): - self.dist.include(ext_modules={'x':'y'}) - with pytest.raises(DistutilsSetupError): - self.dist.exclude(ext_modules={'x':'y'}) - - with pytest.raises(DistutilsSetupError): - self.dist.include(package_dir=['q']) - with pytest.raises(DistutilsSetupError): - self.dist.exclude(package_dir=['q']) - - -class TestFeatures: - - def setup_method(self, method): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - assert not Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - assert Feature("test",standard=True,remove='x').include_by_default() - # Feature must have either kwargs, removes, or require_features - with pytest.raises(DistutilsSetupError): - Feature("test") - - def testAvailability(self): - with pytest.raises(DistutilsPlatformError): - self.dist.features['dwim'].include_in(self.dist) - - def testFeatureOptions(self): - dist = self.dist - assert ( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - assert ( - ('without-dwim',None,'exclude DWIM (default)') in 
dist.feature_options - ) - assert ( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - assert ( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - assert dist.feature_negopt['without-foo'] == 'with-foo' - assert dist.feature_negopt['without-bar'] == 'with-bar' - assert dist.feature_negopt['without-dwim'] == 'with-dwim' - assert (not 'without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - assert dist.with_foo == 1 - assert dist.with_bar == 0 - assert dist.with_baz == 1 - assert (not 'bar_et' in dist.py_modules) - assert (not 'pkg.bar' in dist.packages) - assert ('pkg.baz' in dist.packages) - assert ('scripts/baz_it' in dist.scripts) - assert (('libfoo','foo/foofoo.c') in dist.libraries) - assert dist.ext_modules == [] - assert dist.require_features == [self.req] - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - with pytest.raises(DistutilsOptionError): - dist.include_feature('bar') - - def testFeatureWithInvalidRemove(self): - with pytest.raises(SystemExit): - makeSetup(features={'x':Feature('x', remove='y')}) - -class TestCommandTests: - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - assert (isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - assert ts1.test_suite == 'foo.tests.suite' - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - assert ts2.test_suite == 'bar.tests.suite' - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - assert ts3.test_module == 'foo.tests' - assert ts3.test_suite == 'foo.tests.test_suite' - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - with pytest.raises(DistutilsOptionError): - ts4.ensure_finalized() - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - assert ts5.test_suite == None diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/contexts.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/contexts.py deleted file mode 100644 index 1d29284..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/contexts.py +++ /dev/null @@ -1,85 +0,0 @@ -import tempfile -import os -import shutil -import sys -import contextlib -import site - -from ..compat import StringIO - - -@contextlib.contextmanager -def tempdir(cd=lambda dir:None, **kwargs): - temp_dir = tempfile.mkdtemp(**kwargs) - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) - - -@contextlib.contextmanager -def environment(**replacements): - """ - In a context, patch the environment with replacements. Pass None values - to clear the values. 
- """ - saved = dict( - (key, os.environ[key]) - for key in replacements - if key in os.environ - ) - - # remove values that are null - remove = (key for (key, value) in replacements.items() if value is None) - for key in list(remove): - os.environ.pop(key, None) - replacements.pop(key) - - os.environ.update(replacements) - - try: - yield saved - finally: - for key in replacements: - os.environ.pop(key, None) - os.environ.update(saved) - - -@contextlib.contextmanager -def quiet(): - """ - Redirect stdout/stderr to StringIO objects to prevent console output from - distutils commands. - """ - - old_stdout = sys.stdout - old_stderr = sys.stderr - new_stdout = sys.stdout = StringIO() - new_stderr = sys.stderr = StringIO() - try: - yield new_stdout, new_stderr - finally: - new_stdout.seek(0) - new_stderr.seek(0) - sys.stdout = old_stdout - sys.stderr = old_stderr - - -@contextlib.contextmanager -def save_user_site_setting(): - saved = site.ENABLE_USER_SITE - try: - yield saved - finally: - site.ENABLE_USER_SITE = saved - - -@contextlib.contextmanager -def suppress_exceptions(*excs): - try: - yield - except excs: - pass diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/environment.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/environment.py deleted file mode 100644 index a23c050..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/environment.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -import sys -import unicodedata - -from subprocess import Popen as _Popen, PIPE as _PIPE - - -def _which_dirs(cmd): - result = set() - for path in os.environ.get('PATH', '').split(os.pathsep): - filename = os.path.join(path, cmd) - if os.access(filename, os.X_OK): - result.add(path) - return result - - -def run_setup_py(cmd, pypath=None, path=None, - data_stream=0, env=None): - """ - Execution command for tests, separate from those used by the - code directly to prevent accidental behavior issues - """ - if env is None: - env = dict() - for envname in os.environ: - env[envname] = os.environ[envname] - - #override the python path if needed - if pypath is not None: - env["PYTHONPATH"] = pypath - - #overide the execution path if needed - if path is not None: - env["PATH"] = path - if not env.get("PATH", ""): - env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip")) - env["PATH"] = os.pathsep.join(env["PATH"]) - - cmd = [sys.executable, "setup.py"] + list(cmd) - - # http://bugs.python.org/issue8557 - shell = sys.platform == 'win32' - - try: - proc = _Popen( - cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env, - ) - - data = proc.communicate()[data_stream] - except OSError: - return 1, '' - - #decode the console string if needed - if hasattr(data, "decode"): - # use the default encoding - data = data.decode() - data = unicodedata.normalize('NFC', data) - - #communciate calls wait() - return proc.returncode, data diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/fixtures.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/fixtures.py deleted file mode 100644 index c70c38c..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/fixtures.py +++ /dev/null @@ -1,27 +0,0 @@ -try: - from unittest import mock -except ImportError: - import mock -import pytest - -from . 
import contexts - - -@pytest.yield_fixture -def user_override(): - """ - Override site.USER_BASE and site.USER_SITE with temporary directories in - a context. - """ - with contexts.tempdir() as user_base: - with mock.patch('site.USER_BASE', user_base): - with contexts.tempdir() as user_site: - with mock.patch('site.USER_SITE', user_site): - with contexts.save_user_site_setting(): - yield - - -@pytest.yield_fixture -def tmpdir_cwd(tmpdir): - with tmpdir.as_cwd() as orig: - yield orig diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/py26compat.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/py26compat.py deleted file mode 100644 index c568088..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/py26compat.py +++ /dev/null @@ -1,14 +0,0 @@ -import sys -import tarfile -import contextlib - -def _tarfile_open_ex(*args, **kwargs): - """ - Extend result as a context manager. - """ - return contextlib.closing(tarfile.open(*args, **kwargs)) - -if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): - tarfile_open = _tarfile_open_ex -else: - tarfile_open = tarfile.open diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/script-with-bom.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/script-with-bom.py deleted file mode 100644 index 22dee0d..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/script-with-bom.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -result = 'passed' diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/server.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/server.py deleted file mode 100644 index 6b21427..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/server.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Basic http server for tests to simulate PyPI or custom indexes -""" - -import time -import threading -from setuptools.compat import BaseHTTPRequestHandler -from setuptools.compat import HTTPServer, SimpleHTTPRequestHandler - -class IndexServer(HTTPServer): - """Basic single-threaded http server simulating a package index - - You can use this server in unittest like this:: - s = IndexServer() - s.start() - index_url = s.base_url() + 'mytestindex' - # do some test requests to the index - # The index files should be located in setuptools/tests/indexes - s.stop() - """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=SimpleHTTPRequestHandler): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - self._run = True - - def start(self): - self.thread = threading.Thread(target=self.serve_forever) - self.thread.start() - - def stop(self): - "Stop the server" - - # Let the server finish the last request and wait for a new one. - time.sleep(0.1) - - self.shutdown() - self.thread.join() - self.socket.close() - - def base_url(self): - port = self.server_port - return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port - -class RequestRecorder(BaseHTTPRequestHandler): - def do_GET(self): - requests = vars(self.server).setdefault('requests', []) - requests.append(self) - self.send_response(200, 'OK') - -class MockServer(HTTPServer, threading.Thread): - """ - A simple HTTP Server that records the requests made to it. 
- """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=RequestRecorder): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - threading.Thread.__init__(self) - self.setDaemon(True) - self.requests = [] - - def run(self): - self.serve_forever() - - @property - def url(self): - return 'http://localhost:%(server_port)s/' % vars(self) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_bdist_egg.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_bdist_egg.py deleted file mode 100644 index ccfb2ea..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_bdist_egg.py +++ /dev/null @@ -1,43 +0,0 @@ -"""develop tests -""" -import os -import re - -import pytest - -from setuptools.dist import Distribution - -from . import contexts - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', py_modules=['hi']) -""" - -@pytest.yield_fixture -def setup_context(tmpdir): - with (tmpdir/'setup.py').open('w') as f: - f.write(SETUP_PY) - with (tmpdir/'hi.py').open('w') as f: - f.write('1\n') - with tmpdir.as_cwd(): - yield tmpdir - - -class Test: - def test_bdist_egg(self, setup_context, user_override): - dist = Distribution(dict( - script_name='setup.py', - script_args=['bdist_egg'], - name='foo', - py_modules=['hi'] - )) - os.makedirs(os.path.join('build', 'src')) - with contexts.quiet(): - dist.parse_command_line() - dist.run_commands() - - # let's see if we got our egg link at the right place - [content] = os.listdir('dist') - assert re.match('foo-0.0.0-py[23].\d.egg$', content) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_build_ext.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_build_ext.py deleted file mode 100644 index 0719ba4..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_build_ext.py +++ /dev/null @@ -1,18 +0,0 @@ -import distutils.command.build_ext as orig - -from setuptools.command.build_ext import build_ext -from setuptools.dist import Distribution - -class TestBuildExt: - def test_get_ext_filename(self): - """ - Setuptools needs to give back the same - result as distutils, even if the fullname - is not in ext_map. 
- """ - dist = Distribution() - cmd = build_ext(dist) - cmd.ext_map['foo/bar'] = '' - res = cmd.get_ext_filename('foo') - wanted = orig.build_ext.get_ext_filename(cmd, 'foo') - assert res == wanted diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_develop.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_develop.py deleted file mode 100644 index ed1b194..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_develop.py +++ /dev/null @@ -1,103 +0,0 @@ -"""develop tests -""" -import os -import shutil -import site -import sys -import tempfile - -from setuptools.command.develop import develop -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['foo'], - use_2to3=True, -) -""" - -INIT_PY = """print "foo" -""" - -class TestDevelopTest: - - def setup_method(self, method): - if hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'foo')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # foo/__init__.py - init = os.path.join(self.dir, 'foo', '__init__.py') - f = open(init, 'w') - f.write(INIT_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def teardown_method(self, method): - if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_develop(self): - if hasattr(sys, 'real_prefix'): - return - dist = Distribution( - dict(name='foo', - packages=['foo'], - use_2to3=True, - version='0.0', - )) - dist.script_name = 'setup.py' - cmd = develop(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - #sys.stdout = StringIO() - try: - cmd.run() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - content = os.listdir(site.USER_SITE) - content.sort() - assert content == ['easy-install.pth', 'foo.egg-link'] - - # Check that we are using the right code. - egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') - try: - path = egg_link_file.read().split()[0].strip() - finally: - egg_link_file.close() - init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') - try: - init = init_file.read().strip() - finally: - init_file.close() - if sys.version < "3": - assert init == 'print "foo"' - else: - assert init == 'print("foo")' diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_dist_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_dist_info.py deleted file mode 100644 index 6d0ab58..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_dist_info.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Test .dist-info style distributions. 
-""" -import os -import shutil -import tempfile - -import pytest - -import pkg_resources -from .textwrap import DALS - - -class TestDistInfo: - - def test_distinfo(self): - dists = dict( - (d.project_name, d) - for d in pkg_resources.find_distributions(self.tmpdir) - ) - - assert len(dists) == 2, dists - - unversioned = dists['UnversionedDistribution'] - versioned = dists['VersionedDistribution'] - - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA - - @pytest.mark.importorskip('ast') - def test_conditional_dependencies(self): - specs = 'splort==4', 'quux>=1.1' - requires = list(map(pkg_resources.Requirement.parse, specs)) - - for d in pkg_resources.find_distributions(self.tmpdir): - assert d.requires() == requires[:1] - assert d.requires(extras=('baz',)) == requires - assert d.extras == ['baz'] - - metadata_template = DALS(""" - Metadata-Version: 1.2 - Name: {name} - {version} - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """) - - def setup_method(self, method): - self.tmpdir = tempfile.mkdtemp() - dist_info_name = 'VersionedDistribution-2.718.dist-info' - versioned = os.path.join(self.tmpdir, dist_info_name) - os.mkdir(versioned) - with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file: - metadata = self.metadata_template.format( - name='VersionedDistribution', - version='', - ).replace('\n\n', '\n') - metadata_file.write(metadata) - dist_info_name = 'UnversionedDistribution.dist-info' - unversioned = os.path.join(self.tmpdir, dist_info_name) - os.mkdir(unversioned) - with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file: - metadata = self.metadata_template.format( - name='UnversionedDistribution', - version='Version: 0.3', - ) - metadata_file.write(metadata) - - def teardown_method(self, method): - shutil.rmtree(self.tmpdir) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_easy_install.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_easy_install.py deleted file mode 100644 index e71bbfc..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_easy_install.py +++ /dev/null @@ -1,524 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Easy install Tests -""" -from __future__ import absolute_import - -import sys -import os -import shutil -import tempfile -import site -import contextlib -import tarfile -import logging -import itertools -import distutils.errors - -import pytest -try: - from unittest import mock -except ImportError: - import mock - -from setuptools import sandbox -from setuptools import compat -from setuptools.compat import StringIO, BytesIO, urlparse -from setuptools.sandbox import run_setup -import setuptools.command.easy_install as ei -from setuptools.command.easy_install import PthDistributions -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution -from pkg_resources import working_set -from pkg_resources import Distribution as PRDistribution -import setuptools.tests.server -import pkg_resources - -from .py26compat import tarfile_open -from . 
import contexts -from .textwrap import DALS - - -class FakeDist(object): - def get_entry_map(self, group): - if group != 'console_scripts': - return {} - return {'name': 'ep'} - - def as_requirement(self): - return 'spec' - -SETUP_PY = DALS(""" - from setuptools import setup - - setup(name='foo') - """) - -class TestEasyInstallTest: - - def test_install_site_py(self): - dist = Distribution() - cmd = ei.easy_install(dist) - cmd.sitepy_installed = False - cmd.install_dir = tempfile.mkdtemp() - try: - cmd.install_site_py() - sitepy = os.path.join(cmd.install_dir, 'site.py') - assert os.path.exists(sitepy) - finally: - shutil.rmtree(cmd.install_dir) - - def test_get_script_args(self): - header = ei.CommandSpec.best().from_environment().as_header() - expected = header + DALS(""" - # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' - __requires__ = 'spec' - import sys - from pkg_resources import load_entry_point - - if __name__ == '__main__': - sys.exit( - load_entry_point('spec', 'console_scripts', 'name')() - ) - """) - dist = FakeDist() - - args = next(ei.ScriptWriter.get_args(dist)) - name, script = itertools.islice(args, 2) - - assert script == expected - - def test_no_find_links(self): - # new option '--no-find-links', that blocks find-links added at - # the project level - dist = Distribution() - cmd = ei.easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.no_find_links = True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - assert cmd.package_index.scanned_urls == {} - - # let's try without it (default behavior) - cmd = ei.easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - keys = sorted(cmd.package_index.scanned_urls.keys()) - assert keys == ['link1', 'link2'] - - def test_write_exception(self): - """ - Test that `cant_write_to_target` is rendered as a DistutilsError. - """ - dist = Distribution() - cmd = ei.easy_install(dist) - cmd.install_dir = os.getcwd() - with pytest.raises(distutils.errors.DistutilsError): - cmd.cant_write_to_target() - - -class TestPTHFileWriter: - def test_add_from_cwd_site_sets_dirty(self): - '''a pth file manager should set dirty - if a distribution is in site but also the cwd - ''' - pth = PthDistributions('does-not_exist', [os.getcwd()]) - assert not pth.dirty - pth.add(PRDistribution(os.getcwd())) - assert pth.dirty - - def test_add_from_site_is_ignored(self): - location = '/test/location/does-not-have-to-exist' - # PthDistributions expects all locations to be normalized - location = pkg_resources.normalize_path(location) - pth = PthDistributions('does-not_exist', [location, ]) - assert not pth.dirty - pth.add(PRDistribution(location)) - assert not pth.dirty - - -@pytest.yield_fixture -def setup_context(tmpdir): - with (tmpdir/'setup.py').open('w') as f: - f.write(SETUP_PY) - with tmpdir.as_cwd(): - yield tmpdir - - -@pytest.mark.usefixtures("user_override") -@pytest.mark.usefixtures("setup_context") -class TestUserInstallTest: - - # prevent check that site-packages is writable. easy_install - # shouldn't be writing to system site-packages during finalize - # options, but while it does, bypass the behavior. 
- prev_sp_write = mock.patch( - 'setuptools.command.easy_install.easy_install.check_site_dir', - mock.Mock(), - ) - - # simulate setuptools installed in user site packages - @mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE) - @mock.patch('site.ENABLE_USER_SITE', True) - @prev_sp_write - def test_user_install_not_implied_user_site_enabled(self): - self.assert_not_user_site() - - @mock.patch('site.ENABLE_USER_SITE', False) - @prev_sp_write - def test_user_install_not_implied_user_site_disabled(self): - self.assert_not_user_site() - - @staticmethod - def assert_not_user_site(): - # create a finalized easy_install command - dist = Distribution() - dist.script_name = 'setup.py' - cmd = ei.easy_install(dist) - cmd.args = ['py'] - cmd.ensure_finalized() - assert not cmd.user, 'user should not be implied' - - def test_multiproc_atexit(self): - pytest.importorskip('multiprocessing') - - log = logging.getLogger('test_easy_install') - logging.basicConfig(level=logging.INFO, stream=sys.stderr) - log.info('this should not break') - - @pytest.fixture() - def foo_package(self, tmpdir): - egg_file = tmpdir / 'foo-1.0.egg-info' - with egg_file.open('w') as f: - f.write('Name: foo\n') - return str(tmpdir) - - @pytest.yield_fixture() - def install_target(self, tmpdir): - target = str(tmpdir) - with mock.patch('sys.path', sys.path + [target]): - python_path = os.path.pathsep.join(sys.path) - with mock.patch.dict(os.environ, PYTHONPATH=python_path): - yield target - - def test_local_index(self, foo_package, install_target): - """ - The local index must be used when easy_install locates installed - packages. - """ - dist = Distribution() - dist.script_name = 'setup.py' - cmd = ei.easy_install(dist) - cmd.install_dir = install_target - cmd.args = ['foo'] - cmd.ensure_finalized() - cmd.local_index.scan([foo_package]) - res = cmd.easy_install('foo') - actual = os.path.normcase(os.path.realpath(res.location)) - expected = os.path.normcase(os.path.realpath(foo_package)) - assert actual == expected - - @contextlib.contextmanager - def user_install_setup_context(self, *args, **kwargs): - """ - Wrap sandbox.setup_context to patch easy_install in that context to - appear as user-installed. - """ - with self.orig_context(*args, **kwargs): - import setuptools.command.easy_install as ei - ei.__file__ = site.USER_SITE - yield - - def patched_setup_context(self): - self.orig_context = sandbox.setup_context - - return mock.patch( - 'setuptools.sandbox.setup_context', - self.user_install_setup_context, - ) - - -@pytest.yield_fixture -def distutils_package(): - distutils_setup_py = SETUP_PY.replace( - 'from setuptools import setup', - 'from distutils.core import setup', - ) - with contexts.tempdir(cd=os.chdir): - with open('setup.py', 'w') as f: - f.write(distutils_setup_py) - yield - - -class TestDistutilsPackage: - def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): - run_setup('setup.py', ['bdist_egg']) - - -class TestSetupRequires: - - def test_setup_requires_honors_fetch_params(self): - """ - When easy_install installs a source distribution which specifies - setup_requires, it should honor the fetch parameters (such as - allow-hosts, index-url, and find-links). - """ - # set up a server which will simulate an alternate package index. 
- p_index = setuptools.tests.server.MockServer() - p_index.start() - netloc = 1 - p_index_loc = urlparse(p_index.url)[netloc] - if p_index_loc.endswith(':0'): - # Some platforms (Jython) don't find a port to which to bind, - # so skip this test for them. - return - with contexts.quiet(): - # create an sdist that has a build-time dependency. - with TestSetupRequires.create_sdist() as dist_file: - with contexts.tempdir() as temp_install_dir: - with contexts.environment(PYTHONPATH=temp_install_dir): - ei_params = [ - '--index-url', p_index.url, - '--allow-hosts', p_index_loc, - '--exclude-scripts', - '--install-dir', temp_install_dir, - dist_file, - ] - with sandbox.save_argv(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - with pytest.raises(SystemExit): - easy_install_pkg.main(ei_params) - # there should have been two or three requests to the server - # (three happens on Python 3.3a) - assert 2 <= len(p_index.requests) <= 3 - assert p_index.requests[0].path == '/does-not-exist/' - - @staticmethod - @contextlib.contextmanager - def create_sdist(): - """ - Return an sdist with a setup_requires dependency (of something that - doesn't exist) - """ - with contexts.tempdir() as dir: - dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - script = DALS(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """) - make_trivial_sdist(dist_path, script) - yield dist_path - - def test_setup_requires_overrides_version_conflict(self): - """ - Regression test for issue #323. - - Ensures that a distribution's setup_requires requirements can still be - installed and used locally even if a conflicting version of that - requirement is already on the path. - """ - - pr_state = pkg_resources.__getstate__() - fake_dist = PRDistribution('does-not-matter', project_name='foobar', - version='0.0') - working_set.add(fake_dist) - - try: - with contexts.tempdir() as temp_dir: - test_pkg = create_setup_requires_package(temp_dir) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with contexts.quiet() as (stdout, stderr): - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, ['--name']) - - lines = stdout.readlines() - assert len(lines) > 0 - assert lines[-1].strip(), 'test_pkg' - finally: - pkg_resources.__setstate__(pr_state) - - -def create_setup_requires_package(path): - """Creates a source tree under path for a trivial test package that has a - single requirement in setup_requires--a tarball for that requirement is - also created and added to the dependency_links argument. - """ - - test_setup_attrs = { - 'name': 'test_pkg', 'version': '0.0', - 'setup_requires': ['foobar==0.1'], - 'dependency_links': [os.path.abspath(path)] - } - - test_pkg = os.path.join(path, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - os.mkdir(test_pkg) - - with open(test_setup_py, 'w') as f: - f.write(DALS(""" - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - - foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') - make_trivial_sdist( - foobar_path, - DALS(""" - import setuptools - setuptools.setup( - name='foobar', - version='0.1' - ) - """)) - - return test_pkg - - -def make_trivial_sdist(dist_path, setup_py): - """Create a simple sdist tarball at dist_path, containing just a - setup.py, the contents of which are provided by the setup_py string. 
- """ - - setup_py_file = tarfile.TarInfo(name='setup.py') - try: - # Python 3 (StringIO gets converted to io module) - MemFile = BytesIO - except AttributeError: - MemFile = StringIO - setup_py_bytes = MemFile(setup_py.encode('utf-8')) - setup_py_file.size = len(setup_py_bytes.getvalue()) - with tarfile_open(dist_path, 'w:gz') as dist: - dist.addfile(setup_py_file, fileobj=setup_py_bytes) - - -class TestScriptHeader: - non_ascii_exe = '/Users/José/bin/python' - exe_with_spaces = r'C:\Program Files\Python33\python.exe' - - @pytest.mark.skipif( - sys.platform.startswith('java') and ei.is_sh(sys.executable), - reason="Test cannot run under java when executable is sh" - ) - def test_get_script_header(self): - expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable)) - actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python') - assert actual == expected - - expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath - (sys.executable)) - actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x') - assert actual == expected - - actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe) - expected = '#!%s -x\n' % self.non_ascii_exe - assert actual == expected - - actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', - executable='"'+self.exe_with_spaces+'"') - expected = '#!"%s"\n' % self.exe_with_spaces - assert actual == expected - - @pytest.mark.xfail( - compat.PY3 and os.environ.get("LC_CTYPE") in ("C", "POSIX"), - reason="Test fails in this locale on Python 3" - ) - @mock.patch.dict(sys.modules, java=mock.Mock(lang=mock.Mock(System= - mock.Mock(getProperty=mock.Mock(return_value=""))))) - @mock.patch('sys.platform', 'java1.5.0_13') - def test_get_script_header_jython_workaround(self, tmpdir): - # Create a mock sys.executable that uses a shebang line - header = DALS(""" - #!/usr/bin/python - # -*- coding: utf-8 -*- - """) - exe = tmpdir / 'exe.py' - with exe.open('w') as f: - f.write(header) - exe = str(exe) - - header = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python', - executable=exe) - assert header == '#!/usr/bin/env %s\n' % exe - - expect_out = 'stdout' if sys.version_info < (2,7) else 'stderr' - - with contexts.quiet() as (stdout, stderr): - # When options are included, generate a broken shebang line - # with a warning emitted - candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x', - executable=exe) - assert candidate == '#!%s -x\n' % exe - output = locals()[expect_out] - assert 'Unable to adapt shebang line' in output.getvalue() - - with contexts.quiet() as (stdout, stderr): - candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe) - assert candidate == '#!%s -x\n' % self.non_ascii_exe - output = locals()[expect_out] - assert 'Unable to adapt shebang line' in output.getvalue() - - -class TestCommandSpec: - def test_custom_launch_command(self): - """ - Show how a custom CommandSpec could be used to specify a #! executable - which takes parameters. 
- """ - cmd = ei.CommandSpec(['/usr/bin/env', 'python3']) - assert cmd.as_header() == '#!/usr/bin/env python3\n' - - def test_from_param_for_CommandSpec_is_passthrough(self): - """ - from_param should return an instance of a CommandSpec - """ - cmd = ei.CommandSpec(['python']) - cmd_new = ei.CommandSpec.from_param(cmd) - assert cmd is cmd_new - - def test_from_environment_with_spaces_in_executable(self): - with mock.patch('sys.executable', TestScriptHeader.exe_with_spaces): - cmd = ei.CommandSpec.from_environment() - assert len(cmd) == 1 - assert cmd.as_header().startswith('#!"') - - def test_from_simple_string_uses_shlex(self): - """ - In order to support `executable = /usr/bin/env my-python`, make sure - from_param invokes shlex on that input. - """ - cmd = ei.CommandSpec.from_param('/usr/bin/env my-python') - assert len(cmd) == 2 - assert '"' not in cmd.as_header() - - def test_sys_executable(self): - """ - CommandSpec.from_string(sys.executable) should contain just that param. - """ - writer = ei.ScriptWriter.best() - cmd = writer.command_spec_class.from_string(sys.executable) - assert len(cmd) == 1 - assert cmd[0] == sys.executable - - -class TestWindowsScriptWriter: - def test_header(self): - hdr = ei.WindowsScriptWriter.get_script_header('') - assert hdr.startswith('#!') - assert hdr.endswith('\n') - hdr = hdr.lstrip('#!') - hdr = hdr.rstrip('\n') - # header should not start with an escaped quote - assert not hdr.startswith('\\"') diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_egg_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_egg_info.py deleted file mode 100644 index a1caf9f..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_egg_info.py +++ /dev/null @@ -1,98 +0,0 @@ -import os -import stat - -import pytest - -from . import environment -from .textwrap import DALS -from . 
import contexts - - -class TestEggInfo: - - setup_script = DALS(""" - from setuptools import setup - - setup( - name='foo', - py_modules=['hello'], - entry_points={'console_scripts': ['hi = hello.run']}, - zip_safe=False, - ) - """) - - def _create_project(self): - with open('setup.py', 'w') as f: - f.write(self.setup_script) - - with open('hello.py', 'w') as f: - f.write(DALS(""" - def run(): - print('hello') - """)) - - @pytest.yield_fixture - def env(self): - class Environment(str): pass - - with contexts.tempdir(prefix='setuptools-test.') as env_dir: - env = Environment(env_dir) - os.chmod(env_dir, stat.S_IRWXU) - subs = 'home', 'lib', 'scripts', 'data', 'egg-base' - env.paths = dict( - (dirname, os.path.join(env_dir, dirname)) - for dirname in subs - ) - list(map(os.mkdir, env.paths.values())) - config = os.path.join(env.paths['home'], '.pydistutils.cfg') - with open(config, 'w') as f: - f.write(DALS(""" - [egg_info] - egg-base = %(egg-base)s - """ % env.paths - )) - yield env - - def test_egg_base_installed_egg_info(self, tmpdir_cwd, env): - self._create_project() - - environ = os.environ.copy().update( - HOME=env.paths['home'], - ) - cmd = [ - 'install', - '--home', env.paths['home'], - '--install-lib', env.paths['lib'], - '--install-scripts', env.paths['scripts'], - '--install-data', env.paths['data'], - ] - code, data = environment.run_setup_py( - cmd=cmd, - pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), - data_stream=1, - env=environ, - ) - if code: - raise AssertionError(data) - - actual = self._find_egg_info_files(env.paths['lib']) - - expected = [ - 'PKG-INFO', - 'SOURCES.txt', - 'dependency_links.txt', - 'entry_points.txt', - 'not-zip-safe', - 'top_level.txt', - ] - assert sorted(actual) == expected - - def _find_egg_info_files(self, root): - results = ( - filenames - for dirpath, dirnames, filenames in os.walk(root) - if os.path.basename(dirpath) == 'EGG-INFO' - ) - # expect exactly one result - result, = results - return result diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_find_packages.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_find_packages.py deleted file mode 100644 index 06a7c02..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_find_packages.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Tests for setuptools.find_packages().""" -import os -import sys -import shutil -import tempfile -import platform - -import pytest - -import setuptools -from setuptools import find_packages - -find_420_packages = setuptools.PEP420PackageFinder.find - -# modeled after CPython's test.support.can_symlink -def can_symlink(): - TESTFN = tempfile.mktemp() - symlink_path = TESTFN + "can_symlink" - try: - os.symlink(TESTFN, symlink_path) - can = True - except (OSError, NotImplementedError, AttributeError): - can = False - else: - os.remove(symlink_path) - globals().update(can_symlink=lambda: can) - return can - -def has_symlink(): - bad_symlink = ( - # Windows symlink directory detection is broken on Python 3.2 - platform.system() == 'Windows' and sys.version_info[:2] == (3,2) - ) - return can_symlink() and not bad_symlink - -class TestFindPackages: - - def setup_method(self, method): - self.dist_dir = tempfile.mkdtemp() - self._make_pkg_structure() - - def teardown_method(self, method): - shutil.rmtree(self.dist_dir) - - def _make_pkg_structure(self): - """Make basic package structure. 
- - dist/ - docs/ - conf.py - pkg/ - __pycache__/ - nspkg/ - mod.py - subpkg/ - assets/ - asset - __init__.py - setup.py - - """ - self.docs_dir = self._mkdir('docs', self.dist_dir) - self._touch('conf.py', self.docs_dir) - self.pkg_dir = self._mkdir('pkg', self.dist_dir) - self._mkdir('__pycache__', self.pkg_dir) - self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir) - self._touch('mod.py', self.ns_pkg_dir) - self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir) - self.asset_dir = self._mkdir('assets', self.sub_pkg_dir) - self._touch('asset', self.asset_dir) - self._touch('__init__.py', self.sub_pkg_dir) - self._touch('setup.py', self.dist_dir) - - def _mkdir(self, path, parent_dir=None): - if parent_dir: - path = os.path.join(parent_dir, path) - os.mkdir(path) - return path - - def _touch(self, path, dir_=None): - if dir_: - path = os.path.join(dir_, path) - fp = open(path, 'w') - fp.close() - return path - - def test_regular_package(self): - self._touch('__init__.py', self.pkg_dir) - packages = find_packages(self.dist_dir) - assert packages == ['pkg', 'pkg.subpkg'] - - def test_exclude(self): - self._touch('__init__.py', self.pkg_dir) - packages = find_packages(self.dist_dir, exclude=('pkg.*',)) - assert packages == ['pkg'] - - def test_include_excludes_other(self): - """ - If include is specified, other packages should be excluded. - """ - self._touch('__init__.py', self.pkg_dir) - alt_dir = self._mkdir('other_pkg', self.dist_dir) - self._touch('__init__.py', alt_dir) - packages = find_packages(self.dist_dir, include=['other_pkg']) - assert packages == ['other_pkg'] - - def test_dir_with_dot_is_skipped(self): - shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) - data_dir = self._mkdir('some.data', self.pkg_dir) - self._touch('__init__.py', data_dir) - self._touch('file.dat', data_dir) - packages = find_packages(self.dist_dir) - assert 'pkg.some.data' not in packages - - def test_dir_with_packages_in_subdir_is_excluded(self): - """ - Ensure that a package in a non-package such as build/pkg/__init__.py - is excluded. - """ - build_dir = self._mkdir('build', self.dist_dir) - build_pkg_dir = self._mkdir('pkg', build_dir) - self._touch('__init__.py', build_pkg_dir) - packages = find_packages(self.dist_dir) - assert 'build.pkg' not in packages - - @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') - def test_symlinked_packages_are_included(self): - """ - A symbolically-linked directory should be treated like any other - directory when matched as a package. - - Create a link from lpkg -> pkg. 
- """ - self._touch('__init__.py', self.pkg_dir) - linked_pkg = os.path.join(self.dist_dir, 'lpkg') - os.symlink('pkg', linked_pkg) - assert os.path.isdir(linked_pkg) - packages = find_packages(self.dist_dir) - assert 'lpkg' in packages - - def _assert_packages(self, actual, expected): - assert set(actual) == set(expected) - - def test_pep420_ns_package(self): - packages = find_420_packages( - self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']) - self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) - - def test_pep420_ns_package_no_includes(self): - packages = find_420_packages( - self.dist_dir, exclude=['pkg.subpkg.assets']) - self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg']) - - def test_pep420_ns_package_no_includes_or_excludes(self): - packages = find_420_packages(self.dist_dir) - expected = [ - 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets'] - self._assert_packages(packages, expected) - - def test_regular_package_with_nested_pep420_ns_packages(self): - self._touch('__init__.py', self.pkg_dir) - packages = find_420_packages( - self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']) - self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) - - def test_pep420_ns_package_no_non_package_dirs(self): - shutil.rmtree(self.docs_dir) - shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) - packages = find_420_packages(self.dist_dir) - self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_integration.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_integration.py deleted file mode 100644 index 90bb431..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_integration.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Run some integration tests. - -Try to install a few packages. -""" - -import glob -import os -import sys - -import pytest - -from setuptools.command.easy_install import easy_install -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution -from setuptools.compat import urlopen - - -def setup_module(module): - packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient' - for pkg in packages: - try: - __import__(pkg) - tmpl = "Integration tests cannot run when {pkg} is installed" - pytest.skip(tmpl.format(**locals())) - except ImportError: - pass - - try: - urlopen('https://pypi.python.org/pypi') - except Exception as exc: - pytest.skip(reason=str(exc)) - - -@pytest.fixture -def install_context(request, tmpdir, monkeypatch): - """Fixture to set up temporary installation directory. - """ - # Save old values so we can restore them. - new_cwd = tmpdir.mkdir('cwd') - user_base = tmpdir.mkdir('user_base') - user_site = tmpdir.mkdir('user_site') - install_dir = tmpdir.mkdir('install_dir') - - def fin(): - # undo the monkeypatch, particularly needed under - # windows because of kept handle on cwd - monkeypatch.undo() - new_cwd.remove() - user_base.remove() - user_site.remove() - install_dir.remove() - request.addfinalizer(fin) - - # Change the environment and site settings to control where the - # files are installed and ensure we do not overwrite anything. 
- monkeypatch.chdir(new_cwd) - monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath) - monkeypatch.setattr('site.USER_BASE', user_base.strpath) - monkeypatch.setattr('site.USER_SITE', user_site.strpath) - monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) - monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path)) - - # Set up the command for performing the installation. - dist = Distribution() - cmd = easy_install(dist) - cmd.install_dir = install_dir.strpath - return cmd - - -def _install_one(requirement, cmd, pkgname, modulename): - cmd.args = [requirement] - cmd.ensure_finalized() - cmd.run() - target = cmd.install_dir - dest_path = glob.glob(os.path.join(target, pkgname + '*.egg')) - assert dest_path - assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename)) - - -def test_stevedore(install_context): - _install_one('stevedore', install_context, - 'stevedore', 'extension.py') - - -@pytest.mark.xfail -def test_virtualenvwrapper(install_context): - _install_one('virtualenvwrapper', install_context, - 'virtualenvwrapper', 'hook_loader.py') - - -def test_pbr(install_context): - _install_one('pbr', install_context, - 'pbr', 'core.py') - - -@pytest.mark.xfail -def test_python_novaclient(install_context): - _install_one('python-novaclient', install_context, - 'novaclient', 'base.py') diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_markerlib.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_markerlib.py deleted file mode 100644 index 8197b49..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_markerlib.py +++ /dev/null @@ -1,63 +0,0 @@ -import os - -import pytest - - -class TestMarkerlib: - - @pytest.mark.importorskip('ast') - def test_markers(self): - from _markerlib import interpret, default_environment, compile - - os_name = os.name - - assert interpret("") - - assert interpret("os.name != 'buuuu'") - assert interpret("os_name != 'buuuu'") - assert interpret("python_version > '1.0'") - assert interpret("python_version < '5.0'") - assert interpret("python_version <= '5.0'") - assert interpret("python_version >= '1.0'") - assert interpret("'%s' in os.name" % os_name) - assert interpret("'%s' in os_name" % os_name) - assert interpret("'buuuu' not in os.name") - - assert not interpret("os.name == 'buuuu'") - assert not interpret("os_name == 'buuuu'") - assert not interpret("python_version < '1.0'") - assert not interpret("python_version > '5.0'") - assert not interpret("python_version >= '5.0'") - assert not interpret("python_version <= '1.0'") - assert not interpret("'%s' not in os.name" % os_name) - assert not interpret("'buuuu' in os.name and python_version >= '5.0'") - assert not interpret("'buuuu' in os_name and python_version >= '5.0'") - - environment = default_environment() - environment['extra'] = 'test' - assert interpret("extra == 'test'", environment) - assert not interpret("extra == 'doc'", environment) - - def raises_nameError(): - try: - interpret("python.version == '42'") - except NameError: - pass - else: - raise Exception("Expected NameError") - - raises_nameError() - - def raises_syntaxError(): - try: - interpret("(x for x in (4,))") - except SyntaxError: - pass - else: - raise Exception("Expected SyntaxError") - - raises_syntaxError() - - statement = "python_version == '5'" - assert compile(statement).__doc__ == statement - diff --git 
a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_msvc9compiler.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_msvc9compiler.py deleted file mode 100644 index 09e0460..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_msvc9compiler.py +++ /dev/null @@ -1,179 +0,0 @@ -""" -Tests for msvc9compiler. -""" - -import os -import contextlib -import distutils.errors - -import pytest -try: - from unittest import mock -except ImportError: - import mock - -from . import contexts - -# importing only setuptools should apply the patch -__import__('setuptools') - -pytest.importorskip("distutils.msvc9compiler") - - -def mock_reg(hkcu=None, hklm=None): - """ - Return a mock for distutils.msvc9compiler.Reg, patched - to mock out the functions that access the registry. - """ - - _winreg = getattr(distutils.msvc9compiler, '_winreg', None) - winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) - - hives = { - winreg.HKEY_CURRENT_USER: hkcu or {}, - winreg.HKEY_LOCAL_MACHINE: hklm or {}, - } - - @classmethod - def read_keys(cls, base, key): - """Return list of registry keys.""" - hive = hives.get(base, {}) - return [ - k.rpartition('\\')[2] - for k in hive if k.startswith(key.lower()) - ] - - @classmethod - def read_values(cls, base, key): - """Return dict of registry keys and values.""" - hive = hives.get(base, {}) - return dict( - (k.rpartition('\\')[2], hive[k]) - for k in hive if k.startswith(key.lower()) - ) - - return mock.patch.multiple(distutils.msvc9compiler.Reg, - read_keys=read_keys, read_values=read_values) - - -class TestModulePatch: - """ - Ensure that importing setuptools is sufficient to replace - the standard find_vcvarsall function with a version that - recognizes the "Visual C++ for Python" package. - """ - - key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' - key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir' - - def test_patched(self): - "Test the module is actually patched" - mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ - assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched" - - def test_no_registry_entryies_means_nothing_found(self): - """ - No registry entries or environment variable should lead to an error - directing the user to download vcpython27. - """ - find_vcvarsall = distutils.msvc9compiler.find_vcvarsall - query_vcvarsall = distutils.msvc9compiler.query_vcvarsall - - with contexts.environment(VS90COMNTOOLS=None): - with mock_reg(): - assert find_vcvarsall(9.0) is None - - expected = distutils.errors.DistutilsPlatformError - with pytest.raises(expected) as exc: - query_vcvarsall(9.0) - assert 'aka.ms/vcpython27' in str(exc) - - @pytest.yield_fixture - def user_preferred_setting(self): - """ - Set up environment with different install dirs for user vs. system - and yield the user_install_dir for the expected result. - """ - with self.mock_install_dir() as user_install_dir: - with self.mock_install_dir() as system_install_dir: - reg = mock_reg( - hkcu={ - self.key_32: user_install_dir, - }, - hklm={ - self.key_32: system_install_dir, - self.key_64: system_install_dir, - }, - ) - with reg: - yield user_install_dir - - def test_prefer_current_user(self, user_preferred_setting): - """ - Ensure user's settings are preferred. 
- """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(user_preferred_setting, 'vcvarsall.bat') - assert expected == result - - @pytest.yield_fixture - def local_machine_setting(self): - """ - Set up environment with only the system environment configured. - """ - with self.mock_install_dir() as system_install_dir: - reg = mock_reg( - hklm={ - self.key_32: system_install_dir, - }, - ) - with reg: - yield system_install_dir - - def test_local_machine_recognized(self, local_machine_setting): - """ - Ensure machine setting is honored if user settings are not present. - """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(local_machine_setting, 'vcvarsall.bat') - assert expected == result - - @pytest.yield_fixture - def x64_preferred_setting(self): - """ - Set up environment with 64-bit and 32-bit system settings configured - and yield the canonical location. - """ - with self.mock_install_dir() as x32_dir: - with self.mock_install_dir() as x64_dir: - reg = mock_reg( - hklm={ - # This *should* only exist on 32-bit machines - self.key_32: x32_dir, - # This *should* only exist on 64-bit machines - self.key_64: x64_dir, - }, - ) - with reg: - yield x32_dir - - def test_ensure_64_bit_preferred(self, x64_preferred_setting): - """ - Ensure 64-bit system key is preferred. - """ - result = distutils.msvc9compiler.find_vcvarsall(9.0) - expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat') - assert expected == result - - @staticmethod - @contextlib.contextmanager - def mock_install_dir(): - """ - Make a mock install dir in a unique location so that tests can - distinguish which dir was detected in a given scenario. - """ - with contexts.tempdir() as result: - vcvarsall = os.path.join(result, 'vcvarsall.bat') - with open(vcvarsall, 'w'): - pass - yield result diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_packageindex.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_packageindex.py deleted file mode 100644 index dcd90d6..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_packageindex.py +++ /dev/null @@ -1,203 +0,0 @@ -import sys -import distutils.errors - -from setuptools.compat import httplib, HTTPError, unicode, pathname2url - -import pkg_resources -import setuptools.package_index -from setuptools.tests.server import IndexServer - - -class TestPackageIndex: - - def test_bad_url_bad_port(self): - index = setuptools.package_index.PackageIndex() - url = 'http://127.0.0.1:0/nonesuch/test_package_index' - try: - v = index.open_url(url) - except Exception as v: - assert url in str(v) - else: - assert isinstance(v, HTTPError) - - def test_bad_url_typo(self): - # issue 16 - # easy_install inquant.contentmirror.plone breaks because of a typo - # in its home URL - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' - try: - v = index.open_url(url) - except Exception as v: - assert url in str(v) - else: - assert isinstance(v, HTTPError) - - def test_bad_url_bad_status_line(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - def _urlopen(*args): - raise httplib.BadStatusLine('line') - - index.opener = _urlopen - url = 'http://example.com' - try: - v = index.open_url(url) - except Exception as v: - assert 'line' in str(v) - else: - raise AssertionError('Should have 
raise here!') - - def test_bad_url_double_scheme(self): - """ - A bad URL with a double scheme should raise a DistutilsError. - """ - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue 20 - url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' - try: - index.open_url(url) - except distutils.errors.DistutilsError as error: - msg = unicode(error) - assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg - return - raise RuntimeError("Did not raise") - - def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue #160 - if sys.version_info[0] == 2 and sys.version_info[1] == 7: - # this should not fail - url = 'http://example.com' - page = ('<a href="http://www.famfamfam.com](' - 'http://www.famfamfam.com/">') - index.process_index(url, page) - - def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - url = 'file:///tmp/test_package_index' - assert index.url_ok(url, True) - - def test_links_priority(self): - """ - Download links from the pypi simple index should be used before - external download links. - https://bitbucket.org/tarek/distribute/issue/163 - - Usecase : - - someone uploads a package on pypi, a md5 is generated - - someone manually copies this link (with the md5 in the url) onto an - external page accessible from the package page. - - someone reuploads the package (with a different md5) - - while easy_installing, an MD5 error occurs because the external link - is used - -> Setuptools should use the link from pypi, not the external one. - """ - if sys.platform.startswith('java'): - # Skip this test on jython because binding to :0 fails - return - - # start an index server - server = IndexServer() - server.start() - index_url = server.base_url() + 'test_links_priority/simple/' - - # scan a test index - pi = setuptools.package_index.PackageIndex(index_url) - requirement = pkg_resources.Requirement.parse('foobar') - pi.find_packages(requirement) - server.stop() - - # the distribution has been found - assert 'foobar' in pi - # we have only one link, because links are compared without md5 - assert len(pi['foobar'])==1 - # the link should be from the index - assert 'correct_md5' in pi['foobar'][0].location - - def test_parse_bdist_wininst(self): - parse = setuptools.package_index.parse_bdist_wininst - - actual = parse('reportlab-2.5.win32-py2.4.exe') - expected = 'reportlab-2.5', '2.4', 'win32' - assert actual == expected - - actual = parse('reportlab-2.5.win32.exe') - expected = 'reportlab-2.5', None, 'win32' - assert actual == expected - - actual = parse('reportlab-2.5.win-amd64-py2.7.exe') - expected = 'reportlab-2.5', '2.7', 'win-amd64' - assert actual == expected - - actual = parse('reportlab-2.5.win-amd64.exe') - expected = 'reportlab-2.5', None, 'win-amd64' - assert actual == expected - - def test__vcs_split_rev_from_url(self): - """ - Test the basic usage of _vcs_split_rev_from_url - """ - vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url - url, rev = vsrfu('https://example.com/bar@2995') - assert url == 'https://example.com/bar' - assert rev == '2995' - - def test_local_index(self, tmpdir): - """ - local_open should be able to read an index from the file system. 
- """ - index_file = tmpdir / 'index.html' - with index_file.open('w') as f: - f.write('<div>content</div>') - url = 'file:' + pathname2url(str(tmpdir)) + '/' - res = setuptools.package_index.local_open(url) - assert 'content' in res.read() - - -class TestContentCheckers: - - def test_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - checker.feed('You should probably not be using MD5'.encode('ascii')) - assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' - assert checker.is_valid() - - def test_other_fragment(self): - "Content checks should succeed silently if no hash is present" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different') - checker.feed('anything'.encode('ascii')) - assert checker.is_valid() - - def test_blank_md5(self): - "Content checks should succeed if a hash is empty" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=') - checker.feed('anything'.encode('ascii')) - assert checker.is_valid() - - def test_get_hash_name_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - assert checker.hash_name == 'md5' - - def test_report(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - rep = checker.report(lambda x: x, 'My message about %s') - assert rep == 'My message about md5' diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sandbox.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sandbox.py deleted file mode 100644 index 6e1e9e1..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,102 +0,0 @@ -"""develop tests -""" -import os -import types - -import pytest - -import pkg_resources -import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox - - -class TestSandbox: - - def test_devnull(self, tmpdir): - sandbox = DirectorySandbox(str(tmpdir)) - sandbox.run(self._file_writer(os.devnull)) - - @staticmethod - def _file_writer(path): - def do_write(): - with open(path, 'w') as f: - f.write('xxx') - return do_write - - def test_win32com(self, tmpdir): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. 
- """ - win32com = pytest.importorskip('win32com') - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(str(tmpdir)) - try: - # attempt to create gen_py file - sandbox.run(self._file_writer(target)) - finally: - if os.path.exists(target): - os.remove(target) - - def test_setup_py_with_BOM(self): - """ - It should be possible to execute a setup.py with a Byte Order Mark - """ - target = pkg_resources.resource_filename(__name__, - 'script-with-bom.py') - namespace = types.ModuleType('namespace') - setuptools.sandbox._execfile(target, vars(namespace)) - assert namespace.result == 'passed' - - def test_setup_py_with_CRLF(self, tmpdir): - setup_py = tmpdir / 'setup.py' - with setup_py.open('wb') as stream: - stream.write(b'"degenerate script"\r\n') - setuptools.sandbox._execfile(str(setup_py), globals()) - - -class TestExceptionSaver: - def test_exception_trapped(self): - with setuptools.sandbox.ExceptionSaver(): - raise ValueError("details") - - def test_exception_resumed(self): - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise ValueError("details") - - with pytest.raises(ValueError) as caught: - saved_exc.resume() - - assert isinstance(caught.value, ValueError) - assert str(caught.value) == 'details' - - def test_exception_reconstructed(self): - orig_exc = ValueError("details") - - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise orig_exc - - with pytest.raises(ValueError) as caught: - saved_exc.resume() - - assert isinstance(caught.value, ValueError) - assert caught.value is not orig_exc - - def test_no_exception_passes_quietly(self): - with setuptools.sandbox.ExceptionSaver() as saved_exc: - pass - - saved_exc.resume() - - def test_unpickleable_exception(self): - class CantPickleThis(Exception): - "This Exception is unpickleable because it's not in globals" - - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise CantPickleThis('detail') - - with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: - saved_exc.resume() - - assert str(caught.value) == "CantPickleThis('detail',)" diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sdist.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sdist.py deleted file mode 100644 index 9013b50..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_sdist.py +++ /dev/null @@ -1,419 +0,0 @@ -# -*- coding: utf-8 -*- -"""sdist tests""" - -import locale -import os -import shutil -import sys -import tempfile -import unicodedata -import contextlib - -import pytest - -import pkg_resources -from setuptools.compat import StringIO, unicode, PY3, PY2 -from setuptools.command.sdist import sdist -from setuptools.command.egg_info import manifest_maker -from setuptools.dist import Distribution - -SETUP_ATTRS = { - 'name': 'sdist_test', - 'version': '0.0', - 'packages': ['sdist_test'], - 'package_data': {'sdist_test': ['*.txt']} -} - - -SETUP_PY = """\ -from setuptools import setup - -setup(**%r) -""" % SETUP_ATTRS - - -if PY3: - LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1') -else: - LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py' - - -# Cannot use context manager because of Python 2.4 -@contextlib.contextmanager -def quiet(): - old_stdout, old_stderr = sys.stdout, sys.stderr - sys.stdout, sys.stderr = StringIO(), StringIO() - try: - yield - finally: - sys.stdout, sys.stderr = old_stdout, old_stderr - - -# Fake byte literals for Python <= 2.5 -def b(s, 
encoding='utf-8'): - if PY3: - return s.encode(encoding) - return s - - -# Convert to POSIX path -def posix(path): - if PY3 and not isinstance(path, str): - return path.replace(os.sep.encode('ascii'), b('/')) - else: - return path.replace(os.sep, '/') - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, unicode): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -class TestSdistTest: - - def setup_method(self, method): - self.temp_dir = tempfile.mkdtemp() - f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') - f.write(SETUP_PY) - f.close() - - # Set up the rest of the test package - test_pkg = os.path.join(self.temp_dir, 'sdist_test') - os.mkdir(test_pkg) - # *.rst was not included in package_data, so c.rst should not be - # automatically added to the manifest when not under version control - for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: - # Just touch the files; their contents are irrelevant - open(os.path.join(test_pkg, fname), 'w').close() - - self.old_cwd = os.getcwd() - os.chdir(self.temp_dir) - - def teardown_method(self, method): - os.chdir(self.old_cwd) - shutil.rmtree(self.temp_dir) - - def test_package_data_in_sdist(self): - """Regression test for pull request #4: ensures that files listed in - package_data are included in the manifest even if they're not added to - version control. - """ - - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - with quiet(): - cmd.run() - - manifest = cmd.filelist.files - assert os.path.join('sdist_test', 'a.txt') in manifest - assert os.path.join('sdist_test', 'b.txt') in manifest - assert os.path.join('sdist_test', 'c.rst') not in manifest - - - def test_defaults_case_sensitivity(self): - """ - Make sure default files (README.*, etc.) are added in a case-sensitive - way to avoid problems with packages built on Windows. - """ - - open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close() - open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close() - - dist = Distribution(SETUP_ATTRS) - # the extension deliberately capitalized for this test - # to make sure the actual filename (not capitalized) gets added - # to the manifest - dist.script_name = 'setup.PY' - cmd = sdist(dist) - cmd.ensure_finalized() - - with quiet(): - cmd.run() - - # lowercase all names so we can test in a case-insensitive way to make sure the files are not included - manifest = map(lambda x: x.lower(), cmd.filelist.files) - assert 'readme.rst' not in manifest, manifest - assert 'setup.py' not in manifest, manifest - assert 'setup.cfg' not in manifest, manifest - - def test_manifest_is_written_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join('sdist_test', 'smörbröd.py') - - # Must create the file or it will get stripped. 
- open(filename, 'w').close() - - # Add UTF-8 filename and write manifest - with quiet(): - mm.run() - mm.filelist.append(filename) - mm.write_manifest() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - u_contents = contents.decode('UTF-8') - - # The manifest should contain the UTF-8 filename - if PY2: - fs_enc = sys.getfilesystemencoding() - filename = filename.decode(fs_enc) - - assert posix(filename) in u_contents - - # Python 3 only - if PY3: - - def test_write_manifest_allows_utf8_filenames(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - - # Must touch the file or risk removal - open(filename, "w").close() - - # Add filename and write manifest - with quiet(): - mm.run() - u_filename = filename.decode('utf-8') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - contents.decode('UTF-8') - - # The manifest should contain the UTF-8 filename - assert posix(filename) in contents - - # The filelist should have been updated as well - assert u_filename in mm.filelist.files - - def test_write_manifest_skips_non_utf8_filenames(self): - """ - Files that cannot be encoded to UTF-8 (specifically, those that - weren't originally successfully decoded and have surrogate - escapes) should be omitted from the manifest. - See https://bitbucket.org/tarek/distribute/issue/303 for history. - """ - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - - # Add filename with surrogates and write manifest - with quiet(): - mm.run() - u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.append(u_filename) - # Re-write manifest - mm.write_manifest() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - contents.decode('UTF-8') - - # The Latin-1 filename should have been skipped - assert posix(filename) not in contents - - # The filelist should have been updated as well - assert u_filename not in mm.filelist.files - - def test_manifest_is_read_with_utf8_encoding(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - with quiet(): - cmd.run() - - # Add UTF-8 filename to manifest - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n') + filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - with quiet(): - cmd.read_manifest() - - # The filelist should contain the UTF-8 filename - if PY3: - filename = filename.decode('utf-8') - assert filename in cmd.filelist.files - - # Python 3 only - if PY3: - - def test_read_manifest_skips_non_utf8_filenames(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - with quiet(): - cmd.run() - - # Add Latin-1 filename to manifest - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n') + filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - with quiet(): - cmd.read_manifest() - - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - assert filename not in cmd.filelist.files - - @pytest.mark.skipif(PY3 and locale.getpreferredencoding() != 'UTF-8', - reason='Unittest fails if locale is not utf-8 but the manifests is ' - 'recorded correctly') - def test_sdist_with_utf8_encoded_filename(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - open(filename, 'w').close() - - with quiet(): - cmd.run() - - if sys.platform == 'darwin': - filename = decompose(filename) - - if PY3: - fs_enc = sys.getfilesystemencoding() - - if sys.platform == 'win32': - if fs_enc == 'cp1252': - # Python 3 mangles the UTF-8 filename - filename = filename.decode('cp1252') - assert filename in cmd.filelist.files - else: - filename = filename.decode('mbcs') - assert filename in cmd.filelist.files - else: - filename = filename.decode('utf-8') - assert filename in cmd.filelist.files - else: - assert filename in cmd.filelist.files - - def test_sdist_with_latin1_encoded_filename(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - open(filename, 'w').close() - assert os.path.isfile(filename) - - with quiet(): - cmd.run() - - if PY3: - # not all windows systems have a default FS encoding of cp1252 - if sys.platform == 'win32': - # Latin-1 is similar to Windows-1252 however - # on mbcs filesys it is not in latin-1 encoding - fs_enc = sys.getfilesystemencoding() - if fs_enc == 'mbcs': - filename = filename.decode('mbcs') - else: - filename = filename.decode('latin-1') - - assert filename in cmd.filelist.files - else: - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - filename not in cmd.filelist.files - else: - # Under Python 2 there seems to be no decoded string in the - # filelist. However, due to decode and encoding of the - # file name to get utf-8 Manifest the latin1 maybe excluded - try: - # fs_enc should match how one is expect the decoding to - # be proformed for the manifest output. - fs_enc = sys.getfilesystemencoding() - filename.decode(fs_enc) - assert filename in cmd.filelist.files - except UnicodeDecodeError: - filename not in cmd.filelist.files - - -def test_default_revctrl(): - """ - When _default_revctrl was removed from the `setuptools.command.sdist` - module in 10.0, it broke some systems which keep an old install of - setuptools (Distribute) around. Those old versions require that the - setuptools package continue to implement that interface, so this - function provides that interface, stubbed. See #320 for details. - - This interface must be maintained until Ubuntu 12.04 is no longer - supported (by Setuptools). - """ - ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' - ep = pkg_resources.EntryPoint.parse(ep_def) - res = ep.resolve() - assert hasattr(res, '__iter__') diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_test.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_test.py deleted file mode 100644 index a66294c..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_test.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: UTF-8 -*- - -from __future__ import unicode_literals - -import os -import site - -import pytest - -from setuptools.command.test import test -from setuptools.dist import Distribution - -from .textwrap import DALS -from . 
import contexts - -SETUP_PY = DALS(""" - from setuptools import setup - - setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', - ) - """) - -NS_INIT = DALS(""" - # -*- coding: Latin-1 -*- - # Söme Arbiträry Ünicode to test Distribute Issüé 310 - try: - __import__('pkg_resources').declare_namespace(__name__) - except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) - """) - -TEST_PY = DALS(""" - import unittest - - class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used - - test_suite = unittest.makeSuite(TestTest) - """) - - -@pytest.fixture -def sample_test(tmpdir_cwd): - os.makedirs('name/space/tests') - - # setup.py - with open('setup.py', 'wt') as f: - f.write(SETUP_PY) - - # name/__init__.py - with open('name/__init__.py', 'wb') as f: - f.write(NS_INIT.encode('Latin-1')) - - # name/space/__init__.py - with open('name/space/__init__.py', 'wt') as f: - f.write('#empty\n') - - # name/space/tests/__init__.py - with open('name/space/tests/__init__.py', 'wt') as f: - f.write(TEST_PY) - - -@pytest.mark.skipif('hasattr(sys, "real_prefix")') -@pytest.mark.usefixtures('user_override') -@pytest.mark.usefixtures('sample_test') -class TestTestTest: - - def test_test(self): - params = dict( - name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', - use_2to3=True, - ) - dist = Distribution(params) - dist.script_name = 'setup.py' - cmd = test(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - with contexts.quiet(): - # The test runner calls sys.exit - with contexts.suppress_exceptions(SystemExit): - cmd.run() diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_upload_docs.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_upload_docs.py deleted file mode 100644 index cc71cad..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,59 +0,0 @@ -import os -import zipfile -import contextlib - -import pytest - -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -from .textwrap import DALS -from . import contexts - - -SETUP_PY = DALS( - """ - from setuptools import setup - - setup(name='foo') - """) - - -@pytest.fixture -def sample_project(tmpdir_cwd): - # setup.py - with open('setup.py', 'wt') as f: - f.write(SETUP_PY) - - os.mkdir('build') - - # A test document. - with open('build/index.html', 'w') as f: - f.write("Hello world.") - - # An empty folder. - os.mkdir('build/empty') - - -@pytest.mark.usefixtures('sample_project') -@pytest.mark.usefixtures('user_override') -class TestUploadDocsTest: - - def test_create_zipfile(self): - """ - Ensure zipfile creation handles common cases, including a folder - containing an empty folder. 
- """ - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.target_dir = cmd.upload_dir = 'build' - with contexts.tempdir() as tmp_dir: - tmp_file = os.path.join(tmp_dir, 'foo.zip') - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file: - assert zip_file.namelist() == ['index.html'] diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_windows_wrappers.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_windows_wrappers.py deleted file mode 100644 index 5b14d07..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/test_windows_wrappers.py +++ /dev/null @@ -1,183 +0,0 @@ -""" -Python Script Wrapper for Windows -================================= - -setuptools includes wrappers for Python scripts that allows them to be -executed like regular windows programs. There are 2 wrappers, one -for command-line programs, cli.exe, and one for graphical programs, -gui.exe. These programs are almost identical, function pretty much -the same way, and are generated from the same source file. The -wrapper programs are used by copying them to the directory containing -the script they are to wrap and with the same name as the script they -are to wrap. -""" - -from __future__ import absolute_import - -import sys -import textwrap -import subprocess - -import pytest - -from setuptools.command.easy_install import nt_quote_arg -import pkg_resources - - -pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only") - - -class WrapperTester: - - @classmethod - def prep_script(cls, template): - python_exe = nt_quote_arg(sys.executable) - return template % locals() - - @classmethod - def create_script(cls, tmpdir): - """ - Create a simple script, foo-script.py - - Note that the script starts with a Unix-style '#!' line saying which - Python executable to run. The wrapper will use this line to find the - correct Python executable. - """ - - script = cls.prep_script(cls.script_tmpl) - - with (tmpdir / cls.script_name).open('w') as f: - f.write(script) - - # also copy cli.exe to the sample directory - with (tmpdir / cls.wrapper_name).open('wb') as f: - w = pkg_resources.resource_string('setuptools', cls.wrapper_source) - f.write(w) - - -class TestCLI(WrapperTester): - script_name = 'foo-script.py' - wrapper_source = 'cli-32.exe' - wrapper_name = 'foo.exe' - script_tmpl = textwrap.dedent(""" - #!%(python_exe)s - import sys - input = repr(sys.stdin.read()) - print(sys.argv[0][-14:]) - print(sys.argv[1:]) - print(input) - if __debug__: - print('non-optimized') - """).lstrip() - - def test_basic(self, tmpdir): - """ - When the copy of cli.exe, foo.exe in this example, runs, it examines - the path name it was run with and computes a Python script path name - by removing the '.exe' suffix and adding the '-script.py' suffix. (For - GUI programs, the suffix '-script.pyw' is added.) This is why we - named out script the way we did. Now we can run out script by running - the wrapper: - - This example was a little pathological in that it exercised windows - (MS C runtime) quoting rules: - - - Strings containing spaces are surrounded by double quotes. - - - Double quotes in strings need to be escaped by preceding them with - back slashes. - - - One or more backslashes preceding double quotes need to be escaped - by preceding each of them with back slashes. 
- """ - self.create_script(tmpdir) - cmd = [ - str(tmpdir / 'foo.exe'), - 'arg1', - 'arg 2', - 'arg "2\\"', - 'arg 4\\', - 'arg5 a\\\\b', - ] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) - stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii')) - actual = stdout.decode('ascii').replace('\r\n', '\n') - expected = textwrap.dedent(r""" - \foo-script.py - ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b'] - 'hello\nworld\n' - non-optimized - """).lstrip() - assert actual == expected - - def test_with_options(self, tmpdir): - """ - Specifying Python Command-line Options - -------------------------------------- - - You can specify a single argument on the '#!' line. This can be used - to specify Python options like -O, to run in optimized mode or -i - to start the interactive interpreter. You can combine multiple - options as usual. For example, to run in optimized mode and - enter the interpreter after running the script, you could use -Oi: - """ - self.create_script(tmpdir) - tmpl = textwrap.dedent(""" - #!%(python_exe)s -Oi - import sys - input = repr(sys.stdin.read()) - print(sys.argv[0][-14:]) - print(sys.argv[1:]) - print(input) - if __debug__: - print('non-optimized') - sys.ps1 = '---' - """).lstrip() - with (tmpdir / 'foo-script.py').open('w') as f: - f.write(self.prep_script(tmpl)) - cmd = [str(tmpdir / 'foo.exe')] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) - stdout, stderr = proc.communicate() - actual = stdout.decode('ascii').replace('\r\n', '\n') - expected = textwrap.dedent(r""" - \foo-script.py - [] - '' - --- - """).lstrip() - assert actual == expected - - -class TestGUI(WrapperTester): - """ - Testing the GUI Version - ----------------------- - """ - script_name = 'bar-script.pyw' - wrapper_source = 'gui-32.exe' - wrapper_name = 'bar.exe' - - script_tmpl = textwrap.dedent(""" - #!%(python_exe)s - import sys - f = open(sys.argv[1], 'wb') - bytes_written = f.write(repr(sys.argv[2]).encode('utf-8')) - f.close() - """).strip() - - def test_basic(self, tmpdir): - """Test the GUI version with the simple scipt, bar-script.py""" - self.create_script(tmpdir) - - cmd = [ - str(tmpdir / 'bar.exe'), - str(tmpdir / 'test_output.txt'), - 'Test Argument', - ] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) - stdout, stderr = proc.communicate() - assert not stdout - assert not stderr - with (tmpdir / 'test_output.txt').open('rb') as f_out: - actual = f_out.read().decode('ascii') - assert actual == repr('Test Argument') diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/textwrap.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/textwrap.py deleted file mode 100644 index 5cd9e5b..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/tests/textwrap.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import - -import textwrap - - -def DALS(s): - "dedent and left-strip" - return textwrap.dedent(s).lstrip() diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/unicode_utils.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/unicode_utils.py deleted file mode 100644 index d2de941..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/unicode_utils.py +++ /dev/null @@ -1,41 +0,0 @@ -import unicodedata -import sys -from setuptools.compat import unicode as 
decoded_string - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, decoded_string): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -def filesys_decode(path): - """ - Ensure that the given path is decoded, - NONE when no expected encoding works - """ - - fs_enc = sys.getfilesystemencoding() - if isinstance(path, decoded_string): - return path - - for enc in (fs_enc, "utf-8"): - try: - return path.decode(enc) - except UnicodeDecodeError: - continue - - -def try_encode(string, enc): - "turn unicode encoding into a functional routine" - try: - return string.encode(enc) - except UnicodeEncodeError: - return None diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/utils.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/utils.py deleted file mode 100644 index 91e4b87..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/utils.py +++ /dev/null @@ -1,11 +0,0 @@ -import os -import os.path - - -def cs_path_exists(fspath): - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory)
\ No newline at end of file diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/version.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/version.py deleted file mode 100644 index a2f34e9..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '14.3.2' diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/windows_support.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/windows_support.py deleted file mode 100644 index cb977cf..0000000 --- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/windows_support.py +++ /dev/null @@ -1,29 +0,0 @@ -import platform -import ctypes - - -def windows_only(func): - if platform.system() != 'Windows': - return lambda *args, **kwargs: None - return func - - -@windows_only -def hide_file(path): - """ - Set the hidden attribute on a file or directory. - - From http://stackoverflow.com/questions/19622133/ - - `path` must be text. - """ - __import__('ctypes.wintypes') - SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW - SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD - SetFileAttributes.restype = ctypes.wintypes.BOOL - - FILE_ATTRIBUTE_HIDDEN = 0x02 - - ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) - if not ret: - raise ctypes.WinError() |
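The TestContentCheckers cases above exercise setuptools.package_index.HashChecker, which parses an optional '#md5=<digest>' fragment from a download URL and validates whatever bytes are fed to it; when the fragment is absent or empty, validation succeeds silently. A minimal usage sketch, assuming a setuptools release that still ships this API and reusing the throwaway digest from the tests:

from setuptools.package_index import HashChecker

checker = HashChecker.from_url(
    'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
checker.feed('You should probably not be using MD5'.encode('ascii'))

print(checker.hash_name)   # 'md5', taken from the URL fragment
print(checker.is_valid())  # True here, because the fed bytes match the digest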
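The test_windows_wrappers docstrings above describe how cli.exe and gui.exe locate the script they wrap: the wrapper strips its own '.exe' suffix and looks for a sibling file with '-script.py' (or '-script.pyw' for GUI programs) appended. Below is a plain-Python sketch of that naming convention; the helper name is hypothetical and only restates the rule, it is not part of setuptools:

import os

def companion_script_path(exe_path, gui=False):
    # Drop the '.exe' suffix and append '-script.py' ('-script.pyw'
    # for GUI programs), which is the path the wrapper executes.
    base, _ext = os.path.splitext(exe_path)
    return base + ('-script.pyw' if gui else '-script.py')

assert companion_script_path(r'C:\Scripts\foo.exe') == r'C:\Scripts\foo-script.py'
assert companion_script_path(r'C:\Scripts\bar.exe', gui=True) == r'C:\Scripts\bar-script.pyw'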
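The windows_support.py hunk at the end defines hide_file as a thin ctypes wrapper around SetFileAttributesW, and the @windows_only decorator replaces it with a no-op returning None on other platforms, so callers need no platform check of their own. A short usage sketch, assuming the vendored module imports as setuptools.windows_support and that the (hypothetical) target path exists on a Windows machine:

from setuptools.windows_support import hide_file

# `path` must be text; on non-Windows platforms this silently does
# nothing, on Windows it raises WinError if the path does not exist.
hide_file(u'C:\\projects\\demo\\.eggs')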