Diffstat (limited to 'jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command')
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py             18
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py                78
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py            470
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py            43
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py        21
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py            305
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py             215
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py              169
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py         2262
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py             480
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py              125
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py     116
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py          118
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py      60
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml   15
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py             10
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py               61
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py             22
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py                197
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py               150
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py                 175
-rw-r--r--  jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py          190
22 files changed, 0 insertions, 5300 deletions
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py
deleted file mode 100644
index f6dbc39..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-__all__ = [
- 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
- 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
- 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
- 'register', 'bdist_wininst', 'upload_docs',
-]
-
-from distutils.command.bdist import bdist
-import sys
-
-from setuptools.command import install_scripts
-
-
-if 'egg' not in bdist.format_commands:
- bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
- bdist.format_commands.append('egg')
-
-del bdist, sys
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py
deleted file mode 100644
index 452a924..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/alias.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from distutils.errors import DistutilsOptionError
-
-from setuptools.command.setopt import edit_config, option_base, config_file
-
-
-def shquote(arg):
- """Quote an argument for later parsing by shlex.split()"""
- for c in '"', "'", "\\", "#":
- if c in arg:
- return repr(arg)
- if arg.split() != [arg]:
- return repr(arg)
- return arg
-
-
-class alias(option_base):
- """Define a shortcut that invokes one or more commands"""
-
- description = "define a shortcut to invoke one or more commands"
- command_consumes_arguments = True
-
- user_options = [
- ('remove', 'r', 'remove (unset) the alias'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.args = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.remove and len(self.args) != 1:
- raise DistutilsOptionError(
- "Must specify exactly one argument (the alias name) when "
- "using --remove"
- )
-
- def run(self):
- aliases = self.distribution.get_option_dict('aliases')
-
- if not self.args:
- print("Command Aliases")
- print("---------------")
- for alias in aliases:
- print("setup.py alias", format_alias(alias, aliases))
- return
-
- elif len(self.args) == 1:
- alias, = self.args
- if self.remove:
- command = None
- elif alias in aliases:
- print("setup.py alias", format_alias(alias, aliases))
- return
- else:
- print("No alias definition found for %r" % alias)
- return
- else:
- alias = self.args[0]
- command = ' '.join(map(shquote, self.args[1:]))
-
- edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
-
-
-def format_alias(name, aliases):
- source, command = aliases[name]
- if source == config_file('global'):
- source = '--global-config '
- elif source == config_file('user'):
- source = '--user-config '
- elif source == config_file('local'):
- source = ''
- else:
- source = '--filename=%r' % source
- return source + name + ' ' + command
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py
deleted file mode 100644
index 87dce88..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_egg.py
+++ /dev/null
@@ -1,470 +0,0 @@
-"""setuptools.command.bdist_egg
-
-Build .egg distributions"""
-
-from distutils.errors import DistutilsSetupError
-from distutils.dir_util import remove_tree, mkpath
-from distutils import log
-from types import CodeType
-import sys
-import os
-import marshal
-import textwrap
-
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
-from setuptools.compat import basestring
-from setuptools.extension import Library
-from setuptools import Command
-
-try:
- # Python 2.7 or >=3.2
- from sysconfig import get_path, get_python_version
-
- def _get_purelib():
- return get_path("purelib")
-except ImportError:
- from distutils.sysconfig import get_python_lib, get_python_version
-
- def _get_purelib():
- return get_python_lib(False)
-
-
-def strip_module(filename):
- if '.' in filename:
- filename = os.path.splitext(filename)[0]
- if filename.endswith('module'):
- filename = filename[:-6]
- return filename
-
-
-def write_stub(resource, pyfile):
- _stub_template = textwrap.dedent("""
- def __bootstrap__():
- global __bootstrap__, __loader__, __file__
- import sys, pkg_resources, imp
- __file__ = pkg_resources.resource_filename(__name__, %r)
- __loader__ = None; del __bootstrap__, __loader__
- imp.load_dynamic(__name__,__file__)
- __bootstrap__()
- """).lstrip()
- with open(pyfile, 'w') as f:
- f.write(_stub_template % resource)
-
-
-class bdist_egg(Command):
- description = "create an \"egg\" distribution"
-
- user_options = [
- ('bdist-dir=', 'b',
- "temporary directory for creating the distribution"),
- ('plat-name=', 'p', "platform name to embed in generated filenames "
- "(default: %s)" % get_build_platform()),
- ('exclude-source-files', None,
- "remove all .py files from the generated egg"),
- ('keep-temp', 'k',
- "keep the pseudo-installation tree around after " +
- "creating the distribution archive"),
- ('dist-dir=', 'd',
- "directory to put final built distributions in"),
- ('skip-build', None,
- "skip rebuilding everything (for testing/debugging)"),
- ]
-
- boolean_options = [
- 'keep-temp', 'skip-build', 'exclude-source-files'
- ]
-
- def initialize_options(self):
- self.bdist_dir = None
- self.plat_name = None
- self.keep_temp = 0
- self.dist_dir = None
- self.skip_build = 0
- self.egg_output = None
- self.exclude_source_files = None
-
- def finalize_options(self):
- ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
- self.egg_info = ei_cmd.egg_info
-
- if self.bdist_dir is None:
- bdist_base = self.get_finalized_command('bdist').bdist_base
- self.bdist_dir = os.path.join(bdist_base, 'egg')
-
- if self.plat_name is None:
- self.plat_name = get_build_platform()
-
- self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
-
- if self.egg_output is None:
-
- # Compute filename of the output egg
- basename = Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version,
- get_python_version(),
- self.distribution.has_ext_modules() and self.plat_name
- ).egg_name()
-
- self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
-
- def do_install_data(self):
- # Hack for packages that install data to install's --install-lib
- self.get_finalized_command('install').install_lib = self.bdist_dir
-
- site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
- old, self.distribution.data_files = self.distribution.data_files, []
-
- for item in old:
- if isinstance(item, tuple) and len(item) == 2:
- if os.path.isabs(item[0]):
- realpath = os.path.realpath(item[0])
- normalized = os.path.normcase(realpath)
- if normalized == site_packages or normalized.startswith(
- site_packages + os.sep
- ):
- item = realpath[len(site_packages) + 1:], item[1]
- # XXX else: raise ???
- self.distribution.data_files.append(item)
-
- try:
- log.info("installing package data to %s" % self.bdist_dir)
- self.call_command('install_data', force=0, root=None)
- finally:
- self.distribution.data_files = old
-
- def get_outputs(self):
- return [self.egg_output]
-
- def call_command(self, cmdname, **kw):
- """Invoke reinitialized command `cmdname` with keyword args"""
- for dirname in INSTALL_DIRECTORY_ATTRS:
- kw.setdefault(dirname, self.bdist_dir)
- kw.setdefault('skip_build', self.skip_build)
- kw.setdefault('dry_run', self.dry_run)
- cmd = self.reinitialize_command(cmdname, **kw)
- self.run_command(cmdname)
- return cmd
-
- def run(self):
- # Generate metadata first
- self.run_command("egg_info")
- # We run install_lib before install_data, because some data hacks
- # pull their data path from the install_lib command.
- log.info("installing library code to %s" % self.bdist_dir)
- instcmd = self.get_finalized_command('install')
- old_root = instcmd.root
- instcmd.root = None
- if self.distribution.has_c_libraries() and not self.skip_build:
- self.run_command('build_clib')
- cmd = self.call_command('install_lib', warn_dir=0)
- instcmd.root = old_root
-
- all_outputs, ext_outputs = self.get_ext_outputs()
- self.stubs = []
- to_compile = []
- for (p, ext_name) in enumerate(ext_outputs):
- filename, ext = os.path.splitext(ext_name)
- pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
- '.py')
- self.stubs.append(pyfile)
- log.info("creating stub loader for %s" % ext_name)
- if not self.dry_run:
- write_stub(os.path.basename(ext_name), pyfile)
- to_compile.append(pyfile)
- ext_outputs[p] = ext_name.replace(os.sep, '/')
-
- if to_compile:
- cmd.byte_compile(to_compile)
- if self.distribution.data_files:
- self.do_install_data()
-
- # Make the EGG-INFO directory
- archive_root = self.bdist_dir
- egg_info = os.path.join(archive_root, 'EGG-INFO')
- self.mkpath(egg_info)
- if self.distribution.scripts:
- script_dir = os.path.join(egg_info, 'scripts')
- log.info("installing scripts to %s" % script_dir)
- self.call_command('install_scripts', install_dir=script_dir,
- no_ep=1)
-
- self.copy_metadata_to(egg_info)
- native_libs = os.path.join(egg_info, "native_libs.txt")
- if all_outputs:
- log.info("writing %s" % native_libs)
- if not self.dry_run:
- ensure_directory(native_libs)
- libs_file = open(native_libs, 'wt')
- libs_file.write('\n'.join(all_outputs))
- libs_file.write('\n')
- libs_file.close()
- elif os.path.isfile(native_libs):
- log.info("removing %s" % native_libs)
- if not self.dry_run:
- os.unlink(native_libs)
-
- write_safety_flag(
- os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
- )
-
- if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
- log.warn(
- "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
- if self.exclude_source_files:
- self.zap_pyfiles()
-
- # Make the archive
- make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
- dry_run=self.dry_run, mode=self.gen_header())
- if not self.keep_temp:
- remove_tree(self.bdist_dir, dry_run=self.dry_run)
-
- # Add to 'Distribution.dist_files' so that the "upload" command works
- getattr(self.distribution, 'dist_files', []).append(
- ('bdist_egg', get_python_version(), self.egg_output))
-
- def zap_pyfiles(self):
- log.info("Removing .py files from temporary directory")
- for base, dirs, files in walk_egg(self.bdist_dir):
- for name in files:
- if name.endswith('.py'):
- path = os.path.join(base, name)
- log.debug("Deleting %s", path)
- os.unlink(path)
-
- def zip_safe(self):
- safe = getattr(self.distribution, 'zip_safe', None)
- if safe is not None:
- return safe
- log.warn("zip_safe flag not set; analyzing archive contents...")
- return analyze_egg(self.bdist_dir, self.stubs)
-
- def gen_header(self):
- epm = EntryPoint.parse_map(self.distribution.entry_points or '')
- ep = epm.get('setuptools.installation', {}).get('eggsecutable')
- if ep is None:
- return 'w' # not an eggsecutable, do it the usual way.
-
- if not ep.attrs or ep.extras:
- raise DistutilsSetupError(
- "eggsecutable entry point (%r) cannot have 'extras' "
- "or refer to a module" % (ep,)
- )
-
- pyver = sys.version[:3]
- pkg = ep.module_name
- full = '.'.join(ep.attrs)
- base = ep.attrs[0]
- basename = os.path.basename(self.egg_output)
-
- header = (
- "#!/bin/sh\n"
- 'if [ `basename $0` = "%(basename)s" ]\n'
- 'then exec python%(pyver)s -c "'
- "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
- "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
- '" "$@"\n'
- 'else\n'
- ' echo $0 is not the correct name for this egg file.\n'
- ' echo Please rename it back to %(basename)s and try again.\n'
- ' exec false\n'
- 'fi\n'
- ) % locals()
-
- if not self.dry_run:
- mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
- f = open(self.egg_output, 'w')
- f.write(header)
- f.close()
- return 'a'
-
- def copy_metadata_to(self, target_dir):
- "Copy metadata (egg info) to the target_dir"
- # normalize the path (so that a forward-slash in egg_info will
- # match using startswith below)
- norm_egg_info = os.path.normpath(self.egg_info)
- prefix = os.path.join(norm_egg_info, '')
- for path in self.ei_cmd.filelist.files:
- if path.startswith(prefix):
- target = os.path.join(target_dir, path[len(prefix):])
- ensure_directory(target)
- self.copy_file(path, target)
-
- def get_ext_outputs(self):
- """Get a list of relative paths to C extensions in the output distro"""
-
- all_outputs = []
- ext_outputs = []
-
- paths = {self.bdist_dir: ''}
- for base, dirs, files in os.walk(self.bdist_dir):
- for filename in files:
- if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
- all_outputs.append(paths[base] + filename)
- for filename in dirs:
- paths[os.path.join(base, filename)] = (paths[base] +
- filename + '/')
-
- if self.distribution.has_ext_modules():
- build_cmd = self.get_finalized_command('build_ext')
- for ext in build_cmd.extensions:
- if isinstance(ext, Library):
- continue
- fullname = build_cmd.get_ext_fullname(ext.name)
- filename = build_cmd.get_ext_filename(fullname)
- if not os.path.basename(filename).startswith('dl-'):
- if os.path.exists(os.path.join(self.bdist_dir, filename)):
- ext_outputs.append(filename)
-
- return all_outputs, ext_outputs
-
-
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
-
-
-def walk_egg(egg_dir):
- """Walk an unpacked egg's contents, skipping the metadata directory"""
- walker = os.walk(egg_dir)
- base, dirs, files = next(walker)
- if 'EGG-INFO' in dirs:
- dirs.remove('EGG-INFO')
- yield base, dirs, files
- for bdf in walker:
- yield bdf
-
-
-def analyze_egg(egg_dir, stubs):
- # check for existing flag in EGG-INFO
- for flag, fn in safety_flags.items():
- if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
- return flag
- if not can_scan():
- return False
- safe = True
- for base, dirs, files in walk_egg(egg_dir):
- for name in files:
- if name.endswith('.py') or name.endswith('.pyw'):
- continue
- elif name.endswith('.pyc') or name.endswith('.pyo'):
- # always scan, even if we already know we're not safe
- safe = scan_module(egg_dir, base, name, stubs) and safe
- return safe
-
-
-def write_safety_flag(egg_dir, safe):
- # Write or remove zip safety flag file(s)
- for flag, fn in safety_flags.items():
- fn = os.path.join(egg_dir, fn)
- if os.path.exists(fn):
- if safe is None or bool(safe) != flag:
- os.unlink(fn)
- elif safe is not None and bool(safe) == flag:
- f = open(fn, 'wt')
- f.write('\n')
- f.close()
-
-
-safety_flags = {
- True: 'zip-safe',
- False: 'not-zip-safe',
-}
-
-
-def scan_module(egg_dir, base, name, stubs):
- """Check whether module possibly uses unsafe-for-zipfile stuff"""
-
- filename = os.path.join(base, name)
- if filename[:-1] in stubs:
- return True # Extension module
- pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
- module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
- if sys.version_info < (3, 3):
- skip = 8 # skip magic & date
- else:
- skip = 12 # skip magic & date & file size
- f = open(filename, 'rb')
- f.read(skip)
- code = marshal.load(f)
- f.close()
- safe = True
- symbols = dict.fromkeys(iter_symbols(code))
- for bad in ['__file__', '__path__']:
- if bad in symbols:
- log.warn("%s: module references %s", module, bad)
- safe = False
- if 'inspect' in symbols:
- for bad in [
- 'getsource', 'getabsfile', 'getsourcefile', 'getfile',
- 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
- 'getinnerframes', 'getouterframes', 'stack', 'trace'
- ]:
- if bad in symbols:
- log.warn("%s: module MAY be using inspect.%s", module, bad)
- safe = False
- return safe
-
-
-def iter_symbols(code):
- """Yield names and strings used by `code` and its nested code objects"""
- for name in code.co_names:
- yield name
- for const in code.co_consts:
- if isinstance(const, basestring):
- yield const
- elif isinstance(const, CodeType):
- for name in iter_symbols(const):
- yield name
-
-
-def can_scan():
- if not sys.platform.startswith('java') and sys.platform != 'cli':
- # CPython, PyPy, etc.
- return True
- log.warn("Unable to analyze compiled code on this platform.")
- log.warn("Please ask the author to include a 'zip_safe'"
- " setting (either True or False) in the package's setup.py")
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
- 'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
- mode='w'):
- """Create a zip file from all the files under 'base_dir'. The output
- zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
- Python module (if available) or the InfoZIP "zip" utility (if installed
- and found on the default search path). If neither tool is available,
- raises DistutilsExecError. Returns the name of the output zip file.
- """
- import zipfile
-
- mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
- log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
-
- def visit(z, dirname, names):
- for name in names:
- path = os.path.normpath(os.path.join(dirname, name))
- if os.path.isfile(path):
- p = path[len(base_dir) + 1:]
- if not dry_run:
- z.write(path, p)
- log.debug("adding '%s'" % p)
-
- compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
- if not dry_run:
- z = zipfile.ZipFile(zip_filename, mode, compression=compression)
- for dirname, dirs, files in os.walk(base_dir):
- visit(z, dirname, files)
- z.close()
- else:
- for dirname, dirs, files in os.walk(base_dir):
- visit(None, dirname, files)
- return zip_filename
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py
deleted file mode 100644
index 7073092..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_rpm.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import distutils.command.bdist_rpm as orig
-
-
-class bdist_rpm(orig.bdist_rpm):
- """
- Override the default bdist_rpm behavior to do the following:
-
- 1. Run egg_info to ensure the name and version are properly calculated.
- 2. Always run 'install' using --single-version-externally-managed to
- disable eggs in RPM distributions.
- 3. Replace dash with underscore in the version numbers for better RPM
- compatibility.
- """
-
- def run(self):
- # ensure distro name is up-to-date
- self.run_command('egg_info')
-
- orig.bdist_rpm.run(self)
-
- def _make_spec_file(self):
- version = self.distribution.get_version()
- rpmversion = version.replace('-', '_')
- spec = orig.bdist_rpm._make_spec_file(self)
- line23 = '%define version ' + version
- line24 = '%define version ' + rpmversion
- spec = [
- line.replace(
- "Source0: %{name}-%{version}.tar",
- "Source0: %{name}-%{unmangled_version}.tar"
- ).replace(
- "setup.py install ",
- "setup.py install --single-version-externally-managed "
- ).replace(
- "%setup",
- "%setup -n %{name}-%{unmangled_version}"
- ).replace(line23, line24)
- for line in spec
- ]
- insert_loc = spec.index(line24) + 1
- unmangled_version = "%define unmangled_version " + version
- spec.insert(insert_loc, unmangled_version)
- return spec
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py
deleted file mode 100644
index 073de97..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/bdist_wininst.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import distutils.command.bdist_wininst as orig
-
-
-class bdist_wininst(orig.bdist_wininst):
- def reinitialize_command(self, command, reinit_subcommands=0):
- """
- Supplement reinitialize_command to work around
- http://bugs.python.org/issue20819
- """
- cmd = self.distribution.reinitialize_command(
- command, reinit_subcommands)
- if command in ('install', 'install_lib'):
- cmd.install_lib = None
- return cmd
-
- def run(self):
- self._is_running = True
- try:
- orig.bdist_wininst.run(self)
- finally:
- self._is_running = False
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py
deleted file mode 100644
index e4b2c59..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_ext.py
+++ /dev/null
@@ -1,305 +0,0 @@
-from distutils.command.build_ext import build_ext as _du_build_ext
-from distutils.file_util import copy_file
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler
-from distutils.errors import DistutilsError
-from distutils import log
-import os
-import sys
-import itertools
-
-from setuptools.extension import Library
-
-try:
- # Attempt to use Pyrex for building extensions, if available
- from Pyrex.Distutils.build_ext import build_ext as _build_ext
-except ImportError:
- _build_ext = _du_build_ext
-
-try:
- # Python 2.7 or >=3.2
- from sysconfig import _CONFIG_VARS
-except ImportError:
- from distutils.sysconfig import get_config_var
-
- get_config_var("LDSHARED") # make sure _config_vars is initialized
- del get_config_var
- from distutils.sysconfig import _config_vars as _CONFIG_VARS
-
-have_rtld = False
-use_stubs = False
-libtype = 'shared'
-
-if sys.platform == "darwin":
- use_stubs = True
-elif os.name != 'nt':
- try:
- import dl
- use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
- except ImportError:
- pass
-
-
-if_dl = lambda s: s if have_rtld else ''
-
-
-class build_ext(_build_ext):
- def run(self):
- """Build extensions in build directory, then copy if --inplace"""
- old_inplace, self.inplace = self.inplace, 0
- _build_ext.run(self)
- self.inplace = old_inplace
- if old_inplace:
- self.copy_extensions_to_source()
-
- def copy_extensions_to_source(self):
- build_py = self.get_finalized_command('build_py')
- for ext in self.extensions:
- fullname = self.get_ext_fullname(ext.name)
- filename = self.get_ext_filename(fullname)
- modpath = fullname.split('.')
- package = '.'.join(modpath[:-1])
- package_dir = build_py.get_package_dir(package)
- dest_filename = os.path.join(package_dir,
- os.path.basename(filename))
- src_filename = os.path.join(self.build_lib, filename)
-
- # Always copy, even if source is older than destination, to ensure
- # that the right extensions for the current Python/platform are
- # used.
- copy_file(
- src_filename, dest_filename, verbose=self.verbose,
- dry_run=self.dry_run
- )
- if ext._needs_stub:
- self.write_stub(package_dir or os.curdir, ext, True)
-
- if _build_ext is not _du_build_ext and not hasattr(_build_ext,
- 'pyrex_sources'):
- # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
- def swig_sources(self, sources, *otherargs):
- # first do any Pyrex processing
- sources = _build_ext.swig_sources(self, sources) or sources
- # Then do any actual SWIG stuff on the remainder
- return _du_build_ext.swig_sources(self, sources, *otherargs)
-
- def get_ext_filename(self, fullname):
- filename = _build_ext.get_ext_filename(self, fullname)
- if fullname in self.ext_map:
- ext = self.ext_map[fullname]
- if isinstance(ext, Library):
- fn, ext = os.path.splitext(filename)
- return self.shlib_compiler.library_filename(fn, libtype)
- elif use_stubs and ext._links_to_dynamic:
- d, fn = os.path.split(filename)
- return os.path.join(d, 'dl-' + fn)
- return filename
-
- def initialize_options(self):
- _build_ext.initialize_options(self)
- self.shlib_compiler = None
- self.shlibs = []
- self.ext_map = {}
-
- def finalize_options(self):
- _build_ext.finalize_options(self)
- self.extensions = self.extensions or []
- self.check_extensions_list(self.extensions)
- self.shlibs = [ext for ext in self.extensions
- if isinstance(ext, Library)]
- if self.shlibs:
- self.setup_shlib_compiler()
- for ext in self.extensions:
- ext._full_name = self.get_ext_fullname(ext.name)
- for ext in self.extensions:
- fullname = ext._full_name
- self.ext_map[fullname] = ext
-
- # distutils 3.1 will also ask for module names
- # XXX what to do with conflicts?
- self.ext_map[fullname.split('.')[-1]] = ext
-
- ltd = self.shlibs and self.links_to_dynamic(ext) or False
- ns = ltd and use_stubs and not isinstance(ext, Library)
- ext._links_to_dynamic = ltd
- ext._needs_stub = ns
- filename = ext._file_name = self.get_ext_filename(fullname)
- libdir = os.path.dirname(os.path.join(self.build_lib, filename))
- if ltd and libdir not in ext.library_dirs:
- ext.library_dirs.append(libdir)
- if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
- ext.runtime_library_dirs.append(os.curdir)
-
- def setup_shlib_compiler(self):
- compiler = self.shlib_compiler = new_compiler(
- compiler=self.compiler, dry_run=self.dry_run, force=self.force
- )
- if sys.platform == "darwin":
- tmp = _CONFIG_VARS.copy()
- try:
- # XXX Help! I don't have any idea whether these are right...
- _CONFIG_VARS['LDSHARED'] = (
- "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
- _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
- _CONFIG_VARS['SO'] = ".dylib"
- customize_compiler(compiler)
- finally:
- _CONFIG_VARS.clear()
- _CONFIG_VARS.update(tmp)
- else:
- customize_compiler(compiler)
-
- if self.include_dirs is not None:
- compiler.set_include_dirs(self.include_dirs)
- if self.define is not None:
- # 'define' option is a list of (name,value) tuples
- for (name, value) in self.define:
- compiler.define_macro(name, value)
- if self.undef is not None:
- for macro in self.undef:
- compiler.undefine_macro(macro)
- if self.libraries is not None:
- compiler.set_libraries(self.libraries)
- if self.library_dirs is not None:
- compiler.set_library_dirs(self.library_dirs)
- if self.rpath is not None:
- compiler.set_runtime_library_dirs(self.rpath)
- if self.link_objects is not None:
- compiler.set_link_objects(self.link_objects)
-
- # hack so distutils' build_extension() builds a library instead
- compiler.link_shared_object = link_shared_object.__get__(compiler)
-
- def get_export_symbols(self, ext):
- if isinstance(ext, Library):
- return ext.export_symbols
- return _build_ext.get_export_symbols(self, ext)
-
- def build_extension(self, ext):
- _compiler = self.compiler
- try:
- if isinstance(ext, Library):
- self.compiler = self.shlib_compiler
- _build_ext.build_extension(self, ext)
- if ext._needs_stub:
- cmd = self.get_finalized_command('build_py').build_lib
- self.write_stub(cmd, ext)
- finally:
- self.compiler = _compiler
-
- def links_to_dynamic(self, ext):
- """Return true if 'ext' links to a dynamic lib in the same package"""
- # XXX this should check to ensure the lib is actually being built
- # XXX as dynamic, and not just using a locally-found version or a
- # XXX static-compiled version
- libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
- pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
- return any(pkg + libname in libnames for libname in ext.libraries)
-
- def get_outputs(self):
- return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
-
- def __get_stubs_outputs(self):
- # assemble the base name for each extension that needs a stub
- ns_ext_bases = (
- os.path.join(self.build_lib, *ext._full_name.split('.'))
- for ext in self.extensions
- if ext._needs_stub
- )
- # pair each base with the extension
- pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
- return list(base + fnext for base, fnext in pairs)
-
- def __get_output_extensions(self):
- yield '.py'
- yield '.pyc'
- if self.get_finalized_command('build_py').optimize:
- yield '.pyo'
-
- def write_stub(self, output_dir, ext, compile=False):
- log.info("writing stub loader for %s to %s", ext._full_name,
- output_dir)
- stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
- '.py')
- if compile and os.path.exists(stub_file):
- raise DistutilsError(stub_file + " already exists! Please delete.")
- if not self.dry_run:
- f = open(stub_file, 'w')
- f.write(
- '\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __file__, __loader__",
- " import sys, os, pkg_resources, imp" + if_dl(", dl"),
- " __file__ = pkg_resources.resource_filename"
- "(__name__,%r)"
- % os.path.basename(ext._file_name),
- " del __bootstrap__",
- " if '__loader__' in globals():",
- " del __loader__",
- if_dl(" old_flags = sys.getdlopenflags()"),
- " old_dir = os.getcwd()",
- " try:",
- " os.chdir(os.path.dirname(__file__))",
- if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
- " imp.load_dynamic(__name__,__file__)",
- " finally:",
- if_dl(" sys.setdlopenflags(old_flags)"),
- " os.chdir(old_dir)",
- "__bootstrap__()",
- "" # terminal \n
- ])
- )
- f.close()
- if compile:
- from distutils.util import byte_compile
-
- byte_compile([stub_file], optimize=0,
- force=True, dry_run=self.dry_run)
- optimize = self.get_finalized_command('install_lib').optimize
- if optimize > 0:
- byte_compile([stub_file], optimize=optimize,
- force=True, dry_run=self.dry_run)
- if os.path.exists(stub_file) and not self.dry_run:
- os.unlink(stub_file)
-
-
-if use_stubs or os.name == 'nt':
- # Build shared libraries
- #
- def link_shared_object(
- self, objects, output_libname, output_dir=None, libraries=None,
- library_dirs=None, runtime_library_dirs=None, export_symbols=None,
- debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
- target_lang=None):
- self.link(
- self.SHARED_LIBRARY, objects, output_libname,
- output_dir, libraries, library_dirs, runtime_library_dirs,
- export_symbols, debug, extra_preargs, extra_postargs,
- build_temp, target_lang
- )
-else:
- # Build static libraries everywhere else
- libtype = 'static'
-
- def link_shared_object(
- self, objects, output_libname, output_dir=None, libraries=None,
- library_dirs=None, runtime_library_dirs=None, export_symbols=None,
- debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
- target_lang=None):
- # XXX we need to either disallow these attrs on Library instances,
- # or warn/abort here if set, or something...
- # libraries=None, library_dirs=None, runtime_library_dirs=None,
- # export_symbols=None, extra_preargs=None, extra_postargs=None,
- # build_temp=None
-
- assert output_dir is None # distutils build_ext doesn't pass this
- output_dir, filename = os.path.split(output_libname)
- basename, ext = os.path.splitext(filename)
- if self.library_filename("x").startswith('lib'):
- # strip 'lib' prefix; this is kludgy if some platform uses
- # a different prefix
- basename = basename[3:]
-
- self.create_static_lib(
- objects, basename, output_dir, debug, target_lang
- )
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py
deleted file mode 100644
index a873d54..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/build_py.py
+++ /dev/null
@@ -1,215 +0,0 @@
-from glob import glob
-from distutils.util import convert_path
-import distutils.command.build_py as orig
-import os
-import sys
-import fnmatch
-import textwrap
-
-try:
- from setuptools.lib2to3_ex import Mixin2to3
-except ImportError:
- class Mixin2to3:
- def run_2to3(self, files, doctests=True):
- "do nothing"
-
-
-class build_py(orig.build_py, Mixin2to3):
- """Enhanced 'build_py' command that includes data files with packages
-
- The data files are specified via a 'package_data' argument to 'setup()'.
- See 'setuptools.dist.Distribution' for more details.
-
- Also, this version of the 'build_py' command allows you to specify both
- 'py_modules' and 'packages' in the same setup operation.
- """
-
- def finalize_options(self):
- orig.build_py.finalize_options(self)
- self.package_data = self.distribution.package_data
- self.exclude_package_data = (self.distribution.exclude_package_data or
- {})
- if 'data_files' in self.__dict__:
- del self.__dict__['data_files']
- self.__updated_files = []
- self.__doctests_2to3 = []
-
- def run(self):
- """Build modules, packages, and copy data files to build directory"""
- if not self.py_modules and not self.packages:
- return
-
- if self.py_modules:
- self.build_modules()
-
- if self.packages:
- self.build_packages()
- self.build_package_data()
-
- self.run_2to3(self.__updated_files, False)
- self.run_2to3(self.__updated_files, True)
- self.run_2to3(self.__doctests_2to3, True)
-
- # Only compile actual .py files, using our base class' idea of what our
- # output files are.
- self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
-
- def __getattr__(self, attr):
- if attr == 'data_files': # lazily compute data files
- self.data_files = files = self._get_data_files()
- return files
- return orig.build_py.__getattr__(self, attr)
-
- def build_module(self, module, module_file, package):
- outfile, copied = orig.build_py.build_module(self, module, module_file,
- package)
- if copied:
- self.__updated_files.append(outfile)
- return outfile, copied
-
- def _get_data_files(self):
- """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
- self.analyze_manifest()
- data = []
- for package in self.packages or ():
- # Locate package source directory
- src_dir = self.get_package_dir(package)
-
- # Compute package build directory
- build_dir = os.path.join(*([self.build_lib] + package.split('.')))
-
- # Length of path to strip from found files
- plen = len(src_dir) + 1
-
- # Strip directory from globbed filenames
- filenames = [
- file[plen:] for file in self.find_data_files(package, src_dir)
- ]
- data.append((package, src_dir, build_dir, filenames))
- return data
-
- def find_data_files(self, package, src_dir):
- """Return filenames for package's data files in 'src_dir'"""
- globs = (self.package_data.get('', [])
- + self.package_data.get(package, []))
- files = self.manifest_files.get(package, [])[:]
- for pattern in globs:
- # Each pattern has to be converted to a platform-specific path
- files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
- return self.exclude_data_files(package, src_dir, files)
-
- def build_package_data(self):
- """Copy data files into build directory"""
- for package, src_dir, build_dir, filenames in self.data_files:
- for filename in filenames:
- target = os.path.join(build_dir, filename)
- self.mkpath(os.path.dirname(target))
- srcfile = os.path.join(src_dir, filename)
- outf, copied = self.copy_file(srcfile, target)
- srcfile = os.path.abspath(srcfile)
- if (copied and
- srcfile in self.distribution.convert_2to3_doctests):
- self.__doctests_2to3.append(outf)
-
- def analyze_manifest(self):
- self.manifest_files = mf = {}
- if not self.distribution.include_package_data:
- return
- src_dirs = {}
- for package in self.packages or ():
- # Locate package source directory
- src_dirs[assert_relative(self.get_package_dir(package))] = package
-
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- for path in ei_cmd.filelist.files:
- d, f = os.path.split(assert_relative(path))
- prev = None
- oldf = f
- while d and d != prev and d not in src_dirs:
- prev = d
- d, df = os.path.split(d)
- f = os.path.join(df, f)
- if d in src_dirs:
- if path.endswith('.py') and f == oldf:
- continue # it's a module, not data
- mf.setdefault(src_dirs[d], []).append(path)
-
- def get_data_files(self):
- pass # Lazily compute data files in _get_data_files() function.
-
- def check_package(self, package, package_dir):
- """Check namespace packages' __init__ for declare_namespace"""
- try:
- return self.packages_checked[package]
- except KeyError:
- pass
-
- init_py = orig.build_py.check_package(self, package, package_dir)
- self.packages_checked[package] = init_py
-
- if not init_py or not self.distribution.namespace_packages:
- return init_py
-
- for pkg in self.distribution.namespace_packages:
- if pkg == package or pkg.startswith(package + '.'):
- break
- else:
- return init_py
-
- f = open(init_py, 'rbU')
- if 'declare_namespace'.encode() not in f.read():
- from distutils.errors import DistutilsError
-
- raise DistutilsError(
- "Namespace package problem: %s is a namespace package, but "
- "its\n__init__.py does not call declare_namespace()! Please "
- 'fix it.\n(See the setuptools manual under '
- '"Namespace Packages" for details.)\n"' % (package,)
- )
- f.close()
- return init_py
-
- def initialize_options(self):
- self.packages_checked = {}
- orig.build_py.initialize_options(self)
-
- def get_package_dir(self, package):
- res = orig.build_py.get_package_dir(self, package)
- if self.distribution.src_root is not None:
- return os.path.join(self.distribution.src_root, res)
- return res
-
- def exclude_data_files(self, package, src_dir, files):
- """Filter filenames for package's data files in 'src_dir'"""
- globs = (self.exclude_package_data.get('', [])
- + self.exclude_package_data.get(package, []))
- bad = []
- for pattern in globs:
- bad.extend(
- fnmatch.filter(
- files, os.path.join(src_dir, convert_path(pattern))
- )
- )
- bad = dict.fromkeys(bad)
- seen = {}
- return [
- f for f in files if f not in bad
- and f not in seen and seen.setdefault(f, 1) # ditch dupes
- ]
-
-
-def assert_relative(path):
- if not os.path.isabs(path):
- return path
- from distutils.errors import DistutilsSetupError
-
- msg = textwrap.dedent("""
- Error: setup script specifies an absolute path:
-
- %s
-
- setup() arguments must *always* be /-separated paths relative to the
- setup.py directory, *never* absolute paths.
- """).lstrip() % path
- raise DistutilsSetupError(msg)
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py
deleted file mode 100644
index 368b64f..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/develop.py
+++ /dev/null
@@ -1,169 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsError, DistutilsOptionError
-import os
-import glob
-
-from pkg_resources import Distribution, PathMetadata, normalize_path
-from setuptools.command.easy_install import easy_install
-from setuptools.compat import PY3
-import setuptools
-
-
-class develop(easy_install):
- """Set up package for development"""
-
- description = "install package in 'development mode'"
-
- user_options = easy_install.user_options + [
- ("uninstall", "u", "Uninstall this source package"),
- ("egg-path=", None, "Set the path to be used in the .egg-link file"),
- ]
-
- boolean_options = easy_install.boolean_options + ['uninstall']
-
- command_consumes_arguments = False # override base
-
- def run(self):
- if self.uninstall:
- self.multi_version = True
- self.uninstall_link()
- else:
- self.install_for_development()
- self.warn_deprecated_options()
-
- def initialize_options(self):
- self.uninstall = None
- self.egg_path = None
- easy_install.initialize_options(self)
- self.setup_path = None
- self.always_copy_from = '.' # always copy eggs installed in curdir
-
- def finalize_options(self):
- ei = self.get_finalized_command("egg_info")
- if ei.broken_egg_info:
- template = "Please rename %r to %r before using 'develop'"
- args = ei.egg_info, ei.broken_egg_info
- raise DistutilsError(template % args)
- self.args = [ei.egg_name]
-
- easy_install.finalize_options(self)
- self.expand_basedirs()
- self.expand_dirs()
- # pick up setup-dir .egg files only: no .egg-info
- self.package_index.scan(glob.glob('*.egg'))
-
- self.egg_link = os.path.join(self.install_dir, ei.egg_name +
- '.egg-link')
- self.egg_base = ei.egg_base
- if self.egg_path is None:
- self.egg_path = os.path.abspath(ei.egg_base)
-
- target = normalize_path(self.egg_base)
- egg_path = normalize_path(os.path.join(self.install_dir,
- self.egg_path))
- if egg_path != target:
- raise DistutilsOptionError(
- "--egg-path must be a relative path from the install"
- " directory to " + target
- )
-
- # Make a distribution for the package's source
- self.dist = Distribution(
- target,
- PathMetadata(target, os.path.abspath(ei.egg_info)),
- project_name=ei.egg_name
- )
-
- p = self.egg_base.replace(os.sep, '/')
- if p != os.curdir:
- p = '../' * (p.count('/') + 1)
- self.setup_path = p
- p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
- if p != normalize_path(os.curdir):
- raise DistutilsOptionError(
- "Can't get a consistent path to setup script from"
- " installation directory", p, normalize_path(os.curdir))
-
- def install_for_development(self):
- if PY3 and getattr(self.distribution, 'use_2to3', False):
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
-
- # Fixup egg-link and easy-install.pth
- ei_cmd = self.get_finalized_command("egg_info")
- self.egg_path = build_path
- self.dist.location = build_path
- # XXX
- self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- self.install_site_py() # ensure that target dir is site-safe
- if setuptools.bootstrap_install_from:
- self.easy_install(setuptools.bootstrap_install_from)
- setuptools.bootstrap_install_from = None
-
- # create an .egg-link in the installation dir, pointing to our egg
- log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
- if not self.dry_run:
- f = open(self.egg_link, "w")
- f.write(self.egg_path + "\n" + self.setup_path)
- f.close()
- # postprocess the installed distro, fixing up .pth, installing scripts,
- # and handling requirements
- self.process_distribution(None, self.dist, not self.no_deps)
-
- def uninstall_link(self):
- if os.path.exists(self.egg_link):
- log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
- egg_link_file = open(self.egg_link)
- contents = [line.rstrip() for line in egg_link_file]
- egg_link_file.close()
- if contents not in ([self.egg_path],
- [self.egg_path, self.setup_path]):
- log.warn("Link points to %s: uninstall aborted", contents)
- return
- if not self.dry_run:
- os.unlink(self.egg_link)
- if not self.dry_run:
- self.update_pth(self.dist) # remove any .pth link to us
- if self.distribution.scripts:
- # XXX should also check for entry point scripts!
- log.warn("Note: you must uninstall or replace scripts manually!")
-
- def install_egg_scripts(self, dist):
- if dist is not self.dist:
- # Installing a dependency, so fall back to normal behavior
- return easy_install.install_egg_scripts(self, dist)
-
- # create wrapper scripts in the script dir, pointing to dist.scripts
-
- # new-style...
- self.install_wrapper_scripts(dist)
-
- # ...and old-style
- for script_name in self.distribution.scripts or []:
- script_path = os.path.abspath(convert_path(script_name))
- script_name = os.path.basename(script_path)
- f = open(script_path, 'rU')
- script_text = f.read()
- f.close()
- self.install_script(dist, script_name, script_text, script_path)
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py
deleted file mode 100644
index daf9fe5..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/easy_install.py
+++ /dev/null
@@ -1,2262 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages. For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ https://pythonhosted.org/setuptools/easy_install.html
-
-"""
-
-from glob import glob
-from distutils.util import get_platform
-from distutils.util import convert_path, subst_vars
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
- DistutilsError, DistutilsPlatformError
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
-from distutils import log, dir_util
-from distutils.command.build_scripts import first_line_re
-import sys
-import os
-import zipimport
-import shutil
-import tempfile
-import zipfile
-import re
-import stat
-import random
-import platform
-import textwrap
-import warnings
-import site
-import struct
-import contextlib
-import subprocess
-import shlex
-import io
-
-from setuptools import Command
-from setuptools.sandbox import run_setup
-from setuptools.py31compat import get_path, get_config_vars
-from setuptools.command import setopt
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import PackageIndex
-from setuptools.package_index import URL_SCHEME
-from setuptools.command import bdist_egg, egg_info
-from setuptools.compat import (iteritems, maxsize, basestring, unicode,
- reraise, PY2, PY3, IS_WINDOWS)
-from pkg_resources import (
- yield_lines, normalize_path, resource_string, ensure_directory,
- get_distribution, find_distributions, Environment, Requirement,
- Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
- VersionConflict, DEVELOP_DIST,
-)
-import pkg_resources
-
-# Turn on PEP440Warnings
-warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
-
-
-__all__ = [
- 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
- 'main', 'get_exe_prefixes',
-]
-
-
-def is_64bit():
- return struct.calcsize("P") == 8
-
-
-def samefile(p1, p2):
- both_exist = os.path.exists(p1) and os.path.exists(p2)
- use_samefile = hasattr(os.path, 'samefile') and both_exist
- if use_samefile:
- return os.path.samefile(p1, p2)
- norm_p1 = os.path.normpath(os.path.normcase(p1))
- norm_p2 = os.path.normpath(os.path.normcase(p2))
- return norm_p1 == norm_p2
-
-
-if PY2:
- def _to_ascii(s):
- return s
-
- def isascii(s):
- try:
- unicode(s, 'ascii')
- return True
- except UnicodeError:
- return False
-else:
- def _to_ascii(s):
- return s.encode('ascii')
-
- def isascii(s):
- try:
- s.encode('ascii')
- return True
- except UnicodeError:
- return False
-
-
-class easy_install(Command):
- """Manage a download/build/install process"""
- description = "Find/get/install Python packages"
- command_consumes_arguments = True
-
- user_options = [
- ('prefix=', None, "installation prefix"),
- ("zip-ok", "z", "install package as a zipfile"),
- ("multi-version", "m", "make apps have to require() a version"),
- ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
- ("install-dir=", "d", "install package to DIR"),
- ("script-dir=", "s", "install scripts to DIR"),
- ("exclude-scripts", "x", "Don't install scripts"),
- ("always-copy", "a", "Copy all needed packages to install dir"),
- ("index-url=", "i", "base URL of Python Package Index"),
- ("find-links=", "f", "additional URL(s) to search for packages"),
- ("build-directory=", "b",
- "download/extract/build in DIR; keep the results"),
- ('optimize=', 'O',
- "also compile with optimization: -O1 for \"python -O\", "
- "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
- ('record=', None,
- "filename in which to record list of installed files"),
- ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
- ('site-dirs=', 'S', "list of directories where .pth files work"),
- ('editable', 'e', "Install specified packages in editable form"),
- ('no-deps', 'N', "don't install dependencies"),
- ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
- ('local-snapshots-ok', 'l',
- "allow building eggs from local checkouts"),
- ('version', None, "print version information and exit"),
- ('no-find-links', None,
- "Don't load find-links defined in packages being installed")
- ]
- boolean_options = [
- 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
- 'editable',
- 'no-deps', 'local-snapshots-ok', 'version'
- ]
-
- if site.ENABLE_USER_SITE:
- help_msg = "install in user site-package '%s'" % site.USER_SITE
- user_options.append(('user', None, help_msg))
- boolean_options.append('user')
-
- negative_opt = {'always-unzip': 'zip-ok'}
- create_index = PackageIndex
-
- def initialize_options(self):
- # the --user option seems to be an opt-in one,
- # so the default should be False.
- self.user = 0
- self.zip_ok = self.local_snapshots_ok = None
- self.install_dir = self.script_dir = self.exclude_scripts = None
- self.index_url = None
- self.find_links = None
- self.build_directory = None
- self.args = None
- self.optimize = self.record = None
- self.upgrade = self.always_copy = self.multi_version = None
- self.editable = self.no_deps = self.allow_hosts = None
- self.root = self.prefix = self.no_report = None
- self.version = None
- self.install_purelib = None # for pure module distributions
- self.install_platlib = None # non-pure (dists w/ extensions)
- self.install_headers = None # for C/C++ headers
- self.install_lib = None # set to either purelib or platlib
- self.install_scripts = None
- self.install_data = None
- self.install_base = None
- self.install_platbase = None
- if site.ENABLE_USER_SITE:
- self.install_userbase = site.USER_BASE
- self.install_usersite = site.USER_SITE
- else:
- self.install_userbase = None
- self.install_usersite = None
- self.no_find_links = None
-
- # Options not specifiable via command line
- self.package_index = None
- self.pth_file = self.always_copy_from = None
- self.site_dirs = None
- self.installed_projects = {}
- self.sitepy_installed = False
- # Always read easy_install options, even if we are subclassed, or have
- # an independent instance created. This ensures that defaults will
- # always come from the standard configuration file(s)' "easy_install"
- # section, even if this is a "develop" or "install" command, or some
- # other embedding.
- self._dry_run = None
- self.verbose = self.distribution.verbose
- self.distribution._set_command_options(
- self, self.distribution.get_option_dict('easy_install')
- )
-
- def delete_blockers(self, blockers):
- extant_blockers = (
- filename for filename in blockers
- if os.path.exists(filename) or os.path.islink(filename)
- )
- list(map(self._delete_path, extant_blockers))
-
- def _delete_path(self, path):
- log.info("Deleting %s", path)
- if self.dry_run:
- return
-
- is_tree = os.path.isdir(path) and not os.path.islink(path)
- remover = rmtree if is_tree else os.unlink
- remover(path)
-
- def finalize_options(self):
- if self.version:
- print('setuptools %s' % get_distribution('setuptools').version)
- sys.exit()
-
- py_version = sys.version.split()[0]
- prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
-
- self.config_vars = {
- 'dist_name': self.distribution.get_name(),
- 'dist_version': self.distribution.get_version(),
- 'dist_fullname': self.distribution.get_fullname(),
- 'py_version': py_version,
- 'py_version_short': py_version[0:3],
- 'py_version_nodot': py_version[0] + py_version[2],
- 'sys_prefix': prefix,
- 'prefix': prefix,
- 'sys_exec_prefix': exec_prefix,
- 'exec_prefix': exec_prefix,
- # Only python 3.2+ has abiflags
- 'abiflags': getattr(sys, 'abiflags', ''),
- }
-
- if site.ENABLE_USER_SITE:
- self.config_vars['userbase'] = self.install_userbase
- self.config_vars['usersite'] = self.install_usersite
-
- self._fix_install_dir_for_user_site()
-
- self.expand_basedirs()
- self.expand_dirs()
-
- self._expand('install_dir', 'script_dir', 'build_directory',
- 'site_dirs')
- # If a non-default installation directory was specified, default the
- # script directory to match it.
- if self.script_dir is None:
- self.script_dir = self.install_dir
-
- if self.no_find_links is None:
- self.no_find_links = False
-
- # Let install_dir get set by install_lib command, which in turn
- # gets its info from the install command, and takes into account
- # --prefix and --home and all that other crud.
- self.set_undefined_options(
- 'install_lib', ('install_dir', 'install_dir')
- )
- # Likewise, set default script_dir from 'install_scripts.install_dir'
- self.set_undefined_options(
- 'install_scripts', ('install_dir', 'script_dir')
- )
-
- if self.user and self.install_purelib:
- self.install_dir = self.install_purelib
- self.script_dir = self.install_scripts
- # default --record from the install command
- self.set_undefined_options('install', ('record', 'record'))
- # Should this be moved to the if statement below? It's not used
- # elsewhere
- normpath = map(normalize_path, sys.path)
- self.all_site_dirs = get_site_dirs()
- if self.site_dirs is not None:
- site_dirs = [
- os.path.expanduser(s.strip()) for s in
- self.site_dirs.split(',')
- ]
- for d in site_dirs:
- if not os.path.isdir(d):
- log.warn("%s (in --site-dirs) does not exist", d)
- elif normalize_path(d) not in normpath:
- raise DistutilsOptionError(
- d + " (in --site-dirs) is not on sys.path"
- )
- else:
- self.all_site_dirs.append(normalize_path(d))
- if not self.editable:
- self.check_site_dir()
- self.index_url = self.index_url or "https://pypi.python.org/simple"
- self.shadow_path = self.all_site_dirs[:]
- for path_item in self.install_dir, normalize_path(self.script_dir):
- if path_item not in self.shadow_path:
- self.shadow_path.insert(0, path_item)
-
- if self.allow_hosts is not None:
- hosts = [s.strip() for s in self.allow_hosts.split(',')]
- else:
- hosts = ['*']
- if self.package_index is None:
- self.package_index = self.create_index(
- self.index_url, search_path=self.shadow_path, hosts=hosts,
- )
- self.local_index = Environment(self.shadow_path + sys.path)
-
- if self.find_links is not None:
- if isinstance(self.find_links, basestring):
- self.find_links = self.find_links.split()
- else:
- self.find_links = []
- if self.local_snapshots_ok:
- self.package_index.scan_egg_links(self.shadow_path + sys.path)
- if not self.no_find_links:
- self.package_index.add_find_links(self.find_links)
- self.set_undefined_options('install_lib', ('optimize', 'optimize'))
- if not isinstance(self.optimize, int):
- try:
- self.optimize = int(self.optimize)
- if not (0 <= self.optimize <= 2):
- raise ValueError
- except ValueError:
- raise DistutilsOptionError("--optimize must be 0, 1, or 2")
-
- if self.editable and not self.build_directory:
- raise DistutilsArgError(
- "Must specify a build directory (-b) when using --editable"
- )
- if not self.args:
- raise DistutilsArgError(
- "No urls, filenames, or requirements specified (see --help)")
-
- self.outputs = []
-
- def _fix_install_dir_for_user_site(self):
- """
- Fix the install_dir if "--user" was used.
- """
- if not self.user or not site.ENABLE_USER_SITE:
- return
-
- self.create_home_path()
- if self.install_userbase is None:
- msg = "User base directory is not specified"
- raise DistutilsPlatformError(msg)
- self.install_base = self.install_platbase = self.install_userbase
- scheme_name = os.name.replace('posix', 'unix') + '_user'
- self.select_scheme(scheme_name)
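- # e.g. os.name 'posix' yields the scheme name 'unix_user' and 'nt'
- # yields 'nt_user'; select_scheme() (defined below) then fills in any
- # install_* attribute that is still None from the named scheme.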
-
- def _expand_attrs(self, attrs):
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- if os.name == 'posix' or IS_WINDOWS:
- val = os.path.expanduser(val)
- val = subst_vars(val, self.config_vars)
- setattr(self, attr, val)
-
- def expand_basedirs(self):
- """Calls `os.path.expanduser` on install_base, install_platbase and
- root."""
- self._expand_attrs(['install_base', 'install_platbase', 'root'])
-
- def expand_dirs(self):
- """Calls `os.path.expanduser` on install dirs."""
- self._expand_attrs(['install_purelib', 'install_platlib',
- 'install_lib', 'install_headers',
- 'install_scripts', 'install_data', ])
-
- def run(self):
- if self.verbose != self.distribution.verbose:
- log.set_verbosity(self.verbose)
- try:
- for spec in self.args:
- self.easy_install(spec, not self.no_deps)
- if self.record:
- outputs = self.outputs
- if self.root: # strip any package prefix
- root_len = len(self.root)
- for counter in range(len(outputs)):
- outputs[counter] = outputs[counter][root_len:]
- from distutils import file_util
-
- self.execute(
- file_util.write_file, (self.record, outputs),
- "writing list of installed files to '%s'" %
- self.record
- )
- self.warn_deprecated_options()
- finally:
- log.set_verbosity(self.distribution.verbose)
-
- def pseudo_tempname(self):
- """Return a pseudo-tempname base in the install directory.
- This code is intentionally naive; if a malicious party can write to
- the target directory you're already in deep doodoo.
- """
- try:
- pid = os.getpid()
- except:
- pid = random.randint(0, maxsize)
- return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
-
- def warn_deprecated_options(self):
- pass
-
- def check_site_dir(self):
- """Verify that self.install_dir is .pth-capable dir, if needed"""
-
- instdir = normalize_path(self.install_dir)
- pth_file = os.path.join(instdir, 'easy-install.pth')
-
- # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
- is_site_dir = instdir in self.all_site_dirs
-
- if not is_site_dir and not self.multi_version:
- # No? Then directly test whether it does .pth file processing
- is_site_dir = self.check_pth_processing()
- else:
- # make sure we can write to target dir
- testfile = self.pseudo_tempname() + '.write-test'
- test_exists = os.path.exists(testfile)
- try:
- if test_exists:
- os.unlink(testfile)
- open(testfile, 'w').close()
- os.unlink(testfile)
- except (OSError, IOError):
- self.cant_write_to_target()
-
- if not is_site_dir and not self.multi_version:
- # Can't install non-multi to non-site dir
- raise DistutilsError(self.no_default_version_msg())
-
- if is_site_dir:
- if self.pth_file is None:
- self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
- else:
- self.pth_file = None
-
- PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
- if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
- # only PYTHONPATH dirs need a site.py, so pretend it's there
- self.sitepy_installed = True
- elif self.multi_version and not os.path.exists(pth_file):
- self.sitepy_installed = True # don't need site.py in this case
- self.pth_file = None # and don't create a .pth file
- self.install_dir = instdir
-
- __cant_write_msg = textwrap.dedent("""
- can't create or remove files in install directory
-
- The following error occurred while trying to add or remove files in the
- installation directory:
-
- %s
-
- The installation directory you specified (via --install-dir, --prefix, or
- the distutils default setting) was:
-
- %s
- """).lstrip()
-
- __not_exists_id = textwrap.dedent("""
- This directory does not currently exist. Please create it and try again, or
- choose a different installation directory (using the -d or --install-dir
- option).
- """).lstrip()
-
- __access_msg = textwrap.dedent("""
- Perhaps your account does not have write access to this directory? If the
- installation directory is a system-owned directory, you may need to sign in
- as the administrator or "root" account. If you do not have administrative
- access to this machine, you may wish to choose a different installation
- directory, preferably one that is listed in your PYTHONPATH environment
- variable.
-
- For information on other options, you may wish to consult the
- documentation at:
-
- https://pythonhosted.org/setuptools/easy_install.html
-
- Please make the appropriate changes for your system and try again.
- """).lstrip()
-
- def cant_write_to_target(self):
- msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
-
- if not os.path.exists(self.install_dir):
- msg += '\n' + self.__not_exists_id
- else:
- msg += '\n' + self.__access_msg
- raise DistutilsError(msg)
-
- def check_pth_processing(self):
- """Empirically verify whether .pth files are supported in inst. dir"""
- instdir = self.install_dir
- log.info("Checking .pth file support in %s", instdir)
- pth_file = self.pseudo_tempname() + ".pth"
- ok_file = pth_file + '.ok'
- ok_exists = os.path.exists(ok_file)
- try:
- if ok_exists:
- os.unlink(ok_file)
- dirname = os.path.dirname(ok_file)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- f = open(pth_file, 'w')
- except (OSError, IOError):
- self.cant_write_to_target()
- else:
- try:
- f.write("import os; f = open(%r, 'w'); f.write('OK'); "
- "f.close()\n" % (ok_file,))
- f.close()
- f = None
- executable = sys.executable
- if os.name == 'nt':
- dirname, basename = os.path.split(executable)
- alt = os.path.join(dirname, 'pythonw.exe')
- if (basename.lower() == 'python.exe' and
- os.path.exists(alt)):
- # use pythonw.exe to avoid opening a console window
- executable = alt
-
- from distutils.spawn import spawn
-
- spawn([executable, '-E', '-c', 'pass'], 0)
-
- if os.path.exists(ok_file):
- log.info(
- "TEST PASSED: %s appears to support .pth files",
- instdir
- )
- return True
- finally:
- if f:
- f.close()
- if os.path.exists(ok_file):
- os.unlink(ok_file)
- if os.path.exists(pth_file):
- os.unlink(pth_file)
- if not self.multi_version:
- log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
- return False
-
- def install_egg_scripts(self, dist):
- """Write all the scripts for `dist`, unless scripts are excluded"""
- if not self.exclude_scripts and dist.metadata_isdir('scripts'):
- for script_name in dist.metadata_listdir('scripts'):
- if dist.metadata_isdir('scripts/' + script_name):
- # The "script" is a directory, likely a Python 3
- # __pycache__ directory, so skip it.
- continue
- self.install_script(
- dist, script_name,
- dist.get_metadata('scripts/' + script_name)
- )
- self.install_wrapper_scripts(dist)
-
- def add_output(self, path):
- if os.path.isdir(path):
- for base, dirs, files in os.walk(path):
- for filename in files:
- self.outputs.append(os.path.join(base, filename))
- else:
- self.outputs.append(path)
-
- def not_editable(self, spec):
- if self.editable:
- raise DistutilsArgError(
- "Invalid argument %r: you can't use filenames or URLs "
- "with --editable (except via the --find-links option)."
- % (spec,)
- )
-
- def check_editable(self, spec):
- if not self.editable:
- return
-
- if os.path.exists(os.path.join(self.build_directory, spec.key)):
- raise DistutilsArgError(
- "%r already exists in %s; can't do a checkout there" %
- (spec.key, self.build_directory)
- )
-
- def easy_install(self, spec, deps=False):
- tmpdir = tempfile.mkdtemp(prefix="easy_install-")
- download = None
- if not self.editable:
- self.install_site_py()
-
- try:
- if not isinstance(spec, Requirement):
- if URL_SCHEME(spec):
- # It's a url, download it to tmpdir and process
- self.not_editable(spec)
- download = self.package_index.download(spec, tmpdir)
- return self.install_item(None, download, tmpdir, deps,
- True)
-
- elif os.path.exists(spec):
- # Existing file or directory, just process it directly
- self.not_editable(spec)
- return self.install_item(None, spec, tmpdir, deps, True)
- else:
- spec = parse_requirement_arg(spec)
-
- self.check_editable(spec)
- dist = self.package_index.fetch_distribution(
- spec, tmpdir, self.upgrade, self.editable,
- not self.always_copy, self.local_index
- )
- if dist is None:
- msg = "Could not find suitable distribution for %r" % spec
- if self.always_copy:
- msg += " (--always-copy skips system and development eggs)"
- raise DistutilsError(msg)
- elif dist.precedence == DEVELOP_DIST:
- # .egg-info dists don't need installing, just process deps
- self.process_distribution(spec, dist, deps, "Using")
- return dist
- else:
- return self.install_item(spec, dist.location, tmpdir, deps)
-
- finally:
- if os.path.exists(tmpdir):
- rmtree(tmpdir)
-
- def install_item(self, spec, download, tmpdir, deps, install_needed=False):
-
- # Installation is also needed if the file is in tmpdir or is not an egg
- install_needed = install_needed or self.always_copy
- install_needed = install_needed or os.path.dirname(download) == tmpdir
- install_needed = install_needed or not download.endswith('.egg')
- install_needed = install_needed or (
- self.always_copy_from is not None and
- os.path.dirname(normalize_path(download)) ==
- normalize_path(self.always_copy_from)
- )
-
- if spec and not install_needed:
- # at this point, we know it's a local .egg; we just don't know if
- # it's already installed.
- for dist in self.local_index[spec.project_name]:
- if dist.location == download:
- break
- else:
- install_needed = True # it's not in the local index
-
- log.info("Processing %s", os.path.basename(download))
-
- if install_needed:
- dists = self.install_eggs(spec, download, tmpdir)
- for dist in dists:
- self.process_distribution(spec, dist, deps)
- else:
- dists = [self.egg_distribution(download)]
- self.process_distribution(spec, dists[0], deps, "Using")
-
- if spec is not None:
- for dist in dists:
- if dist in spec:
- return dist
-
- def select_scheme(self, name):
- """Sets the install directories by applying the install schemes."""
- # it's the caller's problem if they supply a bad name!
- scheme = INSTALL_SCHEMES[name]
- for key in SCHEME_KEYS:
- attrname = 'install_' + key
- if getattr(self, attrname) is None:
- setattr(self, attrname, scheme[key])
-
- def process_distribution(self, requirement, dist, deps=True, *info):
- self.update_pth(dist)
- self.package_index.add(dist)
- if dist in self.local_index[dist.key]:
- self.local_index.remove(dist)
- self.local_index.add(dist)
- self.install_egg_scripts(dist)
- self.installed_projects[dist.key] = dist
- log.info(self.installation_report(requirement, dist, *info))
- if (dist.has_metadata('dependency_links.txt') and
- not self.no_find_links):
- self.package_index.add_find_links(
- dist.get_metadata_lines('dependency_links.txt')
- )
- if not deps and not self.always_copy:
- return
- elif requirement is not None and dist.key != requirement.key:
- log.warn("Skipping dependencies for %s", dist)
- return # XXX this is not the distribution we were looking for
- elif requirement is None or dist not in requirement:
- # if we wound up with a different version, resolve what we've got
- distreq = dist.as_requirement()
- requirement = requirement or distreq
- requirement = Requirement(
- distreq.project_name, distreq.specs, requirement.extras
- )
- log.info("Processing dependencies for %s", requirement)
- try:
- distros = WorkingSet([]).resolve(
- [requirement], self.local_index, self.easy_install
- )
- except DistributionNotFound as e:
- raise DistutilsError(
- "Could not find required distribution %s" % e.args
- )
- except VersionConflict as e:
- raise DistutilsError(e.report())
- if self.always_copy or self.always_copy_from:
- # Force all the relevant distros to be copied or activated
- for dist in distros:
- if dist.key not in self.installed_projects:
- self.easy_install(dist.as_requirement())
- log.info("Finished processing dependencies for %s", requirement)
-
- def should_unzip(self, dist):
- if self.zip_ok is not None:
- return not self.zip_ok
- if dist.has_metadata('not-zip-safe'):
- return True
- if not dist.has_metadata('zip-safe'):
- return True
- return False
-
- def maybe_move(self, spec, dist_filename, setup_base):
- dst = os.path.join(self.build_directory, spec.key)
- if os.path.exists(dst):
- msg = ("%r already exists in %s; build directory %s will not be "
- "kept")
- log.warn(msg, spec.key, self.build_directory, setup_base)
- return setup_base
- if os.path.isdir(dist_filename):
- setup_base = dist_filename
- else:
- if os.path.dirname(dist_filename) == setup_base:
- os.unlink(dist_filename) # get it out of the tmp dir
- contents = os.listdir(setup_base)
- if len(contents) == 1:
- dist_filename = os.path.join(setup_base, contents[0])
- if os.path.isdir(dist_filename):
- # if the only thing there is a directory, move it instead
- setup_base = dist_filename
- ensure_directory(dst)
- shutil.move(setup_base, dst)
- return dst
-
- def install_wrapper_scripts(self, dist):
- if not self.exclude_scripts:
- for args in ScriptWriter.best().get_args(dist):
- self.write_script(*args)
-
- def install_script(self, dist, script_name, script_text, dev_path=None):
- """Generate a legacy script wrapper and install it"""
- spec = str(dist.as_requirement())
- is_script = is_python_script(script_text, script_name)
-
- if is_script:
- script_text = (ScriptWriter.get_header(script_text) +
- self._load_template(dev_path) % locals())
- self.write_script(script_name, _to_ascii(script_text), 'b')
-
- @staticmethod
- def _load_template(dev_path):
- """
- There are a couple of template scripts in the package. This
- function loads one of them and prepares it for use.
- """
- # See https://bitbucket.org/pypa/setuptools/issue/134 for info
- # on script file naming and downstream issues with SVR4
- name = 'script.tmpl'
- if dev_path:
- name = name.replace('.tmpl', ' (dev).tmpl')
-
- raw_bytes = resource_string('setuptools', name)
- return raw_bytes.decode('utf-8')
-
- def write_script(self, script_name, contents, mode="t", blockers=()):
- """Write an executable file to the scripts directory"""
- self.delete_blockers( # clean up old .py/.pyw w/o a script
- [os.path.join(self.script_dir, x) for x in blockers]
- )
- log.info("Installing %s script to %s", script_name, self.script_dir)
- target = os.path.join(self.script_dir, script_name)
- self.add_output(target)
-
- mask = current_umask()
- if not self.dry_run:
- ensure_directory(target)
- if os.path.exists(target):
- os.unlink(target)
- f = open(target, "w" + mode)
- f.write(contents)
- f.close()
- chmod(target, 0o777 - mask)
-
- def install_eggs(self, spec, dist_filename, tmpdir):
- # .egg dirs or files are already built, so just return them
- if dist_filename.lower().endswith('.egg'):
- return [self.install_egg(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.exe'):
- return [self.install_exe(dist_filename, tmpdir)]
-
- # Anything else, try to extract and build
- setup_base = tmpdir
- if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
- unpack_archive(dist_filename, tmpdir, self.unpack_progress)
- elif os.path.isdir(dist_filename):
- setup_base = os.path.abspath(dist_filename)
-
- if (setup_base.startswith(tmpdir) # something we downloaded
- and self.build_directory and spec is not None):
- setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
- # Find the setup.py file
- setup_script = os.path.join(setup_base, 'setup.py')
-
- if not os.path.exists(setup_script):
- setups = glob(os.path.join(setup_base, '*', 'setup.py'))
- if not setups:
- raise DistutilsError(
- "Couldn't find a setup script in %s" %
- os.path.abspath(dist_filename)
- )
- if len(setups) > 1:
- raise DistutilsError(
- "Multiple setup scripts in %s" %
- os.path.abspath(dist_filename)
- )
- setup_script = setups[0]
-
- # Now run it, and return the result
- if self.editable:
- log.info(self.report_editable(spec, setup_script))
- return []
- else:
- return self.build_and_install(setup_script, setup_base)
-
- def egg_distribution(self, egg_path):
- if os.path.isdir(egg_path):
- metadata = PathMetadata(egg_path, os.path.join(egg_path,
- 'EGG-INFO'))
- else:
- metadata = EggMetadata(zipimport.zipimporter(egg_path))
- return Distribution.from_filename(egg_path, metadata=metadata)
-
- def install_egg(self, egg_path, tmpdir):
- destination = os.path.join(self.install_dir,
- os.path.basename(egg_path))
- destination = os.path.abspath(destination)
- if not self.dry_run:
- ensure_directory(destination)
-
- dist = self.egg_distribution(egg_path)
- if not samefile(egg_path, destination):
- if os.path.isdir(destination) and not os.path.islink(destination):
- dir_util.remove_tree(destination, dry_run=self.dry_run)
- elif os.path.exists(destination):
- self.execute(os.unlink, (destination,), "Removing " +
- destination)
- try:
- new_dist_is_zipped = False
- if os.path.isdir(egg_path):
- if egg_path.startswith(tmpdir):
- f, m = shutil.move, "Moving"
- else:
- f, m = shutil.copytree, "Copying"
- elif self.should_unzip(dist):
- self.mkpath(destination)
- f, m = self.unpack_and_compile, "Extracting"
- else:
- new_dist_is_zipped = True
- if egg_path.startswith(tmpdir):
- f, m = shutil.move, "Moving"
- else:
- f, m = shutil.copy2, "Copying"
- self.execute(f, (egg_path, destination),
- (m + " %s to %s") %
- (os.path.basename(egg_path),
- os.path.dirname(destination)))
- update_dist_caches(destination,
- fix_zipimporter_caches=new_dist_is_zipped)
- except:
- update_dist_caches(destination, fix_zipimporter_caches=False)
- raise
-
- self.add_output(destination)
- return self.egg_distribution(destination)
-
- def install_exe(self, dist_filename, tmpdir):
- # See if it's valid, get data
- cfg = extract_wininst_cfg(dist_filename)
- if cfg is None:
- raise DistutilsError(
- "%s is not a valid distutils Windows .exe" % dist_filename
- )
- # Create a dummy distribution object until we build the real distro
- dist = Distribution(
- None,
- project_name=cfg.get('metadata', 'name'),
- version=cfg.get('metadata', 'version'), platform=get_platform(),
- )
-
- # Convert the .exe to an unpacked egg
- egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
- '.egg')
- egg_tmp = egg_path + '.tmp'
- _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
- pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
- ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
- dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
- self.exe_to_egg(dist_filename, egg_tmp)
-
- # Write EGG-INFO/PKG-INFO
- if not os.path.exists(pkg_inf):
- f = open(pkg_inf, 'w')
- f.write('Metadata-Version: 1.0\n')
- for k, v in cfg.items('metadata'):
- if k != 'target_version':
- f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
- f.close()
- script_dir = os.path.join(_egg_info, 'scripts')
- # delete entry-point scripts to avoid duping
- self.delete_blockers(
- [os.path.join(script_dir, args[0]) for args in
- ScriptWriter.get_args(dist)]
- )
- # Build .egg file from tmpdir
- bdist_egg.make_zipfile(
- egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
- )
- # install the .egg
- return self.install_egg(egg_path, tmpdir)
-
- def exe_to_egg(self, dist_filename, egg_tmp):
- """Extract a bdist_wininst to the directories an egg would use"""
- # Check for .pth file and set up prefix translations
- prefixes = get_exe_prefixes(dist_filename)
- to_compile = []
- native_libs = []
- top_level = {}
-
- def process(src, dst):
- s = src.lower()
- for old, new in prefixes:
- if s.startswith(old):
- src = new + src[len(old):]
- parts = src.split('/')
- dst = os.path.join(egg_tmp, *parts)
- dl = dst.lower()
- if dl.endswith('.pyd') or dl.endswith('.dll'):
- parts[-1] = bdist_egg.strip_module(parts[-1])
- top_level[os.path.splitext(parts[0])[0]] = 1
- native_libs.append(src)
- elif dl.endswith('.py') and old != 'SCRIPTS/':
- top_level[os.path.splitext(parts[0])[0]] = 1
- to_compile.append(dst)
- return dst
- if not src.endswith('.pth'):
- log.warn("WARNING: can't process %s", src)
- return None
-
- # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
- unpack_archive(dist_filename, egg_tmp, process)
- stubs = []
- for res in native_libs:
- if res.lower().endswith('.pyd'): # create stubs for .pyd's
- parts = res.split('/')
- resource = parts[-1]
- parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
- pyfile = os.path.join(egg_tmp, *parts)
- to_compile.append(pyfile)
- stubs.append(pyfile)
- bdist_egg.write_stub(resource, pyfile)
- self.byte_compile(to_compile) # compile .py's
- bdist_egg.write_safety_flag(
- os.path.join(egg_tmp, 'EGG-INFO'),
- bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
-
- for name in 'top_level', 'native_libs':
- if locals()[name]:
- txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
- if not os.path.exists(txt):
- f = open(txt, 'w')
- f.write('\n'.join(locals()[name]) + '\n')
- f.close()
-
- __mv_warning = textwrap.dedent("""
- Because this distribution was installed --multi-version, before you can
- import modules from this package in an application, you will need to
- 'import pkg_resources' and then use a 'require()' call similar to one of
- these examples, in order to select the desired version:
-
- pkg_resources.require("%(name)s") # latest installed version
- pkg_resources.require("%(name)s==%(version)s") # this exact version
- pkg_resources.require("%(name)s>=%(version)s") # this version or higher
- """).lstrip()
-
- __id_warning = textwrap.dedent("""
- Note also that the installation directory must be on sys.path at runtime for
- this to work. (e.g. by being the application's script directory, by being on
- PYTHONPATH, or by being added to sys.path by your code.)
- """)
-
- def installation_report(self, req, dist, what="Installed"):
- """Helpful installation message for display to package users"""
- msg = "\n%(what)s %(eggloc)s%(extras)s"
- if self.multi_version and not self.no_report:
- msg += '\n' + self.__mv_warning
- if self.install_dir not in map(normalize_path, sys.path):
- msg += '\n' + self.__id_warning
-
- eggloc = dist.location
- name = dist.project_name
- version = dist.version
- extras = '' # TODO: self.report_extras(req, dist)
- return msg % locals()
-
- __editable_msg = textwrap.dedent("""
- Extracted editable version of %(spec)s to %(dirname)s
-
- If it uses setuptools in its setup script, you can activate it in
- "development" mode by going to that directory and running::
-
- %(python)s setup.py develop
-
- See the setuptools documentation for the "develop" command for more info.
- """).lstrip()
-
- def report_editable(self, spec, setup_script):
- dirname = os.path.dirname(setup_script)
- python = sys.executable
- return '\n' + self.__editable_msg % locals()
-
- def run_setup(self, setup_script, setup_base, args):
- sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
- sys.modules.setdefault('distutils.command.egg_info', egg_info)
-
- args = list(args)
- if self.verbose > 2:
- v = 'v' * (self.verbose - 1)
- args.insert(0, '-' + v)
- elif self.verbose < 2:
- args.insert(0, '-q')
- if self.dry_run:
- args.insert(0, '-n')
- log.info(
- "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
- )
- try:
- run_setup(setup_script, args)
- except SystemExit as v:
- raise DistutilsError("Setup script exited with %s" % (v.args[0],))
-
- def build_and_install(self, setup_script, setup_base):
- args = ['bdist_egg', '--dist-dir']
-
- dist_dir = tempfile.mkdtemp(
- prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
- )
- try:
- self._set_fetcher_options(os.path.dirname(setup_script))
- args.append(dist_dir)
-
- self.run_setup(setup_script, setup_base, args)
- all_eggs = Environment([dist_dir])
- eggs = []
- for key in all_eggs:
- for dist in all_eggs[key]:
- eggs.append(self.install_egg(dist.location, setup_base))
- if not eggs and not self.dry_run:
- log.warn("No eggs found in %s (setup script problem?)",
- dist_dir)
- return eggs
- finally:
- rmtree(dist_dir)
- log.set_verbosity(self.verbose) # restore our log verbosity
-
- def _set_fetcher_options(self, base):
- """
- When easy_install is about to run bdist_egg on a source dist, that
- source dist might have 'setup_requires' directives, requiring
- additional fetching. Ensure the fetcher options given to easy_install
- are available to that command as well.
- """
- # find the fetch options from easy_install and write them out
- # to the setup.cfg file.
- ei_opts = self.distribution.get_option_dict('easy_install').copy()
- fetch_directives = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
- 'site_dirs', 'allow_hosts',
- )
- fetch_options = {}
- for key, val in ei_opts.items():
- if key not in fetch_directives:
- continue
- fetch_options[key.replace('_', '-')] = val[1]
- # create a settings dictionary suitable for `edit_config`
- settings = dict(easy_install=fetch_options)
- cfg_filename = os.path.join(base, 'setup.cfg')
- setopt.edit_config(cfg_filename, settings)
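- # Illustrative sketch: had easy_install been given, say,
- # --find-links=http://example.com/pkgs (a hypothetical URL), the
- # settings dict would be
- #   {'easy_install': {'find-links': 'http://example.com/pkgs'}}
- # and edit_config() would write roughly the following into setup.cfg:
- #   [easy_install]
- #   find-links = http://example.com/pkgs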
-
- def update_pth(self, dist):
- if self.pth_file is None:
- return
-
- for d in self.pth_file[dist.key]: # drop old entries
- if self.multi_version or d.location != dist.location:
- log.info("Removing %s from easy-install.pth file", d)
- self.pth_file.remove(d)
- if d.location in self.shadow_path:
- self.shadow_path.remove(d.location)
-
- if not self.multi_version:
- if dist.location in self.pth_file.paths:
- log.info(
- "%s is already the active version in easy-install.pth",
- dist
- )
- else:
- log.info("Adding %s to easy-install.pth file", dist)
- self.pth_file.add(dist) # add new entry
- if dist.location not in self.shadow_path:
- self.shadow_path.append(dist.location)
-
- if not self.dry_run:
-
- self.pth_file.save()
-
- if dist.key == 'setuptools':
- # Ensure that setuptools itself never becomes unavailable!
- # XXX should this check for latest version?
- filename = os.path.join(self.install_dir, 'setuptools.pth')
- if os.path.islink(filename):
- os.unlink(filename)
- f = open(filename, 'wt')
- f.write(self.pth_file.make_relative(dist.location) + '\n')
- f.close()
-
- def unpack_progress(self, src, dst):
- # Progress filter for unpacking
- log.debug("Unpacking %s to %s", src, dst)
- return dst # only unpack-and-compile skips files for dry run
-
- def unpack_and_compile(self, egg_path, destination):
- to_compile = []
- to_chmod = []
-
- def pf(src, dst):
- if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
- to_compile.append(dst)
- elif dst.endswith('.dll') or dst.endswith('.so'):
- to_chmod.append(dst)
- self.unpack_progress(src, dst)
- return not self.dry_run and dst or None
-
- unpack_archive(egg_path, destination, pf)
- self.byte_compile(to_compile)
- if not self.dry_run:
- for f in to_chmod:
- mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
- chmod(f, mode)
-
- def byte_compile(self, to_compile):
- if sys.dont_write_bytecode:
- self.warn('byte-compiling is disabled, skipping.')
- return
-
- from distutils.util import byte_compile
-
- try:
- # try to make the byte compile messages quieter
- log.set_verbosity(self.verbose - 1)
-
- byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
- if self.optimize:
- byte_compile(
- to_compile, optimize=self.optimize, force=1,
- dry_run=self.dry_run
- )
- finally:
- log.set_verbosity(self.verbose) # restore original verbosity
-
- __no_default_msg = textwrap.dedent("""
- bad install directory or PYTHONPATH
-
- You are attempting to install a package to a directory that is not
- on PYTHONPATH and which Python does not read ".pth" files from. The
- installation directory you specified (via --install-dir, --prefix, or
- the distutils default setting) was:
-
- %s
-
- and your PYTHONPATH environment variable currently contains:
-
- %r
-
- Here are some of your options for correcting the problem:
-
- * You can choose a different installation directory, i.e., one that is
- on PYTHONPATH or supports .pth files
-
- * You can add the installation directory to the PYTHONPATH environment
- variable. (It must then also be on PYTHONPATH whenever you run
- Python and want to use the package(s) you are installing.)
-
- * You can set up the installation directory to support ".pth" files by
- using one of the approaches described here:
-
- https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
-
- Please make the appropriate changes for your system and try again.""").lstrip()
-
- def no_default_version_msg(self):
- template = self.__no_default_msg
- return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
-
- def install_site_py(self):
- """Make sure there's a site.py in the target dir, if needed"""
-
- if self.sitepy_installed:
- return # already did it, or don't need to
-
- sitepy = os.path.join(self.install_dir, "site.py")
- source = resource_string("setuptools", "site-patch.py")
- current = ""
-
- if os.path.exists(sitepy):
- log.debug("Checking existing site.py in %s", self.install_dir)
- f = open(sitepy, 'rb')
- current = f.read()
- # we want str, not bytes
- if PY3:
- current = current.decode()
-
- f.close()
- if not current.startswith('def __boot():'):
- raise DistutilsError(
- "%s is not a setuptools-generated site.py; please"
- " remove it." % sitepy
- )
-
- if current != source:
- log.info("Creating %s", sitepy)
- if not self.dry_run:
- ensure_directory(sitepy)
- f = open(sitepy, 'wb')
- f.write(source)
- f.close()
- self.byte_compile([sitepy])
-
- self.sitepy_installed = True
-
- def create_home_path(self):
- """Create directories under ~."""
- if not self.user:
- return
- home = convert_path(os.path.expanduser("~"))
- for name, path in iteritems(self.config_vars):
- if path.startswith(home) and not os.path.isdir(path):
- self.debug_print("os.makedirs('%s', 0o700)" % path)
- os.makedirs(path, 0o700)
-
- INSTALL_SCHEMES = dict(
- posix=dict(
- install_dir='$base/lib/python$py_version_short/site-packages',
- script_dir='$base/bin',
- ),
- )
-
- DEFAULT_SCHEME = dict(
- install_dir='$base/Lib/site-packages',
- script_dir='$base/Scripts',
- )
-
- def _expand(self, *attrs):
- config_vars = self.get_finalized_command('install').config_vars
-
- if self.prefix:
- # Set default install_dir/scripts from --prefix
- config_vars = config_vars.copy()
- config_vars['base'] = self.prefix
- scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
- for attr, val in scheme.items():
- if getattr(self, attr, None) is None:
- setattr(self, attr, val)
-
- from distutils.util import subst_vars
-
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- val = subst_vars(val, config_vars)
- if os.name == 'posix':
- val = os.path.expanduser(val)
- setattr(self, attr, val)
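- # Illustrative sketch: with a hypothetical --prefix=/opt/foo on a POSIX
- # system running Python 2.7, the 'posix' scheme above expands via
- # subst_vars() to
- #   install_dir = /opt/foo/lib/python2.7/site-packages
- #   script_dir  = /opt/foo/bin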
-
-
-def get_site_dirs():
- # return a list of 'site' dirs
- sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
- '').split(os.pathsep) if _f]
- prefixes = [sys.prefix]
- if sys.exec_prefix != sys.prefix:
- prefixes.append(sys.exec_prefix)
- for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos'):
- sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
- sitedirs.extend([os.path.join(prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages"),
- os.path.join(prefix, "lib", "site-python")])
- else:
- sitedirs.extend(
- [prefix, os.path.join(prefix, "lib", "site-packages")]
- )
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- sitedirs.append(
- os.path.join(home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages'))
- lib_paths = get_path('purelib'), get_path('platlib')
- for site_lib in lib_paths:
- if site_lib not in sitedirs:
- sitedirs.append(site_lib)
-
- if site.ENABLE_USER_SITE:
- sitedirs.append(site.USER_SITE)
-
- sitedirs = list(map(normalize_path, sitedirs))
-
- return sitedirs
-
-
-def expand_paths(inputs):
- """Yield sys.path directories that might contain "old-style" packages"""
-
- seen = {}
-
- for dirname in inputs:
- dirname = normalize_path(dirname)
- if dirname in seen:
- continue
-
- seen[dirname] = 1
- if not os.path.isdir(dirname):
- continue
-
- files = os.listdir(dirname)
- yield dirname, files
-
- for name in files:
- if not name.endswith('.pth'):
- # We only care about the .pth files
- continue
- if name in ('easy-install.pth', 'setuptools.pth'):
- # Ignore .pth files that we control
- continue
-
- # Read the .pth file
- f = open(os.path.join(dirname, name))
- lines = list(yield_lines(f))
- f.close()
-
- # Yield existing non-dupe, non-import directory lines from it
- for line in lines:
- if not line.startswith("import"):
- line = normalize_path(line.rstrip())
- if line not in seen:
- seen[line] = 1
- if not os.path.isdir(line):
- continue
- yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
- """Extract configuration data from a bdist_wininst .exe
-
- Returns a ConfigParser.RawConfigParser, or None
- """
- f = open(dist_filename, 'rb')
- try:
- endrec = zipfile._EndRecData(f)
- if endrec is None:
- return None
-
- prepended = (endrec[9] - endrec[5]) - endrec[6]
- if prepended < 12: # no wininst data here
- return None
- f.seek(prepended - 12)
-
- from setuptools.compat import StringIO, ConfigParser
- import struct
-
- tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
- if tag not in (0x1234567A, 0x1234567B):
- return None # not a valid tag
-
- f.seek(prepended - (12 + cfglen))
- cfg = ConfigParser.RawConfigParser(
- {'version': '', 'target_version': ''})
- try:
- part = f.read(cfglen)
- # Read up to the first null byte.
- config = part.split(b'\0', 1)[0]
- # Now the config is in bytes, but for RawConfigParser, it should
- # be text, so decode it.
- config = config.decode(sys.getfilesystemencoding())
- cfg.readfp(StringIO(config))
- except ConfigParser.Error:
- return None
- if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
- return None
- return cfg
-
- finally:
- f.close()
-
-
-def get_exe_prefixes(exe_filename):
- """Get exe->egg path translations for a given .exe file"""
-
- prefixes = [
- ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
- ('PLATLIB/', ''),
- ('SCRIPTS/', 'EGG-INFO/scripts/'),
- ('DATA/lib/site-packages', ''),
- ]
- z = zipfile.ZipFile(exe_filename)
- try:
- for info in z.infolist():
- name = info.filename
- parts = name.split('/')
- if len(parts) == 3 and parts[2] == 'PKG-INFO':
- if parts[1].endswith('.egg-info'):
- prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
- break
- if len(parts) != 2 or not name.endswith('.pth'):
- continue
- if name.endswith('-nspkg.pth'):
- continue
- if parts[0].upper() in ('PURELIB', 'PLATLIB'):
- contents = z.read(name)
- if PY3:
- contents = contents.decode()
- for pth in yield_lines(contents):
- pth = pth.strip().replace('\\', '/')
- if not pth.startswith('import'):
- prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
- finally:
- z.close()
- prefixes = [(x.lower(), y) for x, y in prefixes]
- prefixes.sort()
- prefixes.reverse()
- return prefixes
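-# Illustrative sketch: with the default table above, an archive member named
-# 'PURELIB/foo/__init__.py' (hypothetical) is relocated to 'foo/__init__.py'
-# inside the egg, while 'SCRIPTS/foo-script.py' becomes
-# 'EGG-INFO/scripts/foo-script.py' (matching is case-insensitive because the
-# prefixes are lowercased).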
-
-
-def parse_requirement_arg(spec):
- try:
- return Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" % (spec,)
- )
-
-
-class PthDistributions(Environment):
- """A .pth file with Distribution paths in it"""
-
- dirty = False
-
- def __init__(self, filename, sitedirs=()):
- self.filename = filename
- self.sitedirs = list(map(normalize_path, sitedirs))
- self.basedir = normalize_path(os.path.dirname(self.filename))
- self._load()
- Environment.__init__(self, [], None, None)
- for path in yield_lines(self.paths):
- list(map(self.add, find_distributions(path, True)))
-
- def _load(self):
- self.paths = []
- saw_import = False
- seen = dict.fromkeys(self.sitedirs)
- if os.path.isfile(self.filename):
- f = open(self.filename, 'rt')
- for line in f:
- if line.startswith('import'):
- saw_import = True
- continue
- path = line.rstrip()
- self.paths.append(path)
- if not path.strip() or path.strip().startswith('#'):
- continue
- # skip non-existent paths (in case somebody deleted a package
- # manually), as well as duplicate paths
- path = self.paths[-1] = normalize_path(
- os.path.join(self.basedir, path)
- )
- if not os.path.exists(path) or path in seen:
- self.paths.pop() # skip it
- self.dirty = True # we cleaned up, so we're dirty now :)
- continue
- seen[path] = 1
- f.close()
-
- if self.paths and not saw_import:
- self.dirty = True # ensure anything we touch has import wrappers
- while self.paths and not self.paths[-1].strip():
- self.paths.pop()
-
- def save(self):
- """Write changed .pth file back to disk"""
- if not self.dirty:
- return
-
- data = '\n'.join(map(self.make_relative, self.paths))
- if data:
- log.debug("Saving %s", self.filename)
- data = (
- "import sys; sys.__plen = len(sys.path)\n"
- "%s\n"
- "import sys; new=sys.path[sys.__plen:];"
- " del sys.path[sys.__plen:];"
- " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
- " sys.__egginsert = p+len(new)\n"
- ) % data
-
- if os.path.islink(self.filename):
- os.unlink(self.filename)
- f = open(self.filename, 'wt')
- f.write(data)
- f.close()
-
- elif os.path.exists(self.filename):
- log.debug("Deleting empty %s", self.filename)
- os.unlink(self.filename)
-
- self.dirty = False
-
- def add(self, dist):
- """Add `dist` to the distribution map"""
- new_path = (
- dist.location not in self.paths and (
- dist.location not in self.sitedirs or
- # account for '.' being in PYTHONPATH
- dist.location == os.getcwd()
- )
- )
- if new_path:
- self.paths.append(dist.location)
- self.dirty = True
- Environment.add(self, dist)
-
- def remove(self, dist):
- """Remove `dist` from the distribution map"""
- while dist.location in self.paths:
- self.paths.remove(dist.location)
- self.dirty = True
- Environment.remove(self, dist)
-
- def make_relative(self, path):
- npath, last = os.path.split(normalize_path(path))
- baselen = len(self.basedir)
- parts = [last]
- sep = os.altsep == '/' and '/' or os.sep
- while len(npath) >= baselen:
- if npath == self.basedir:
- parts.append(os.curdir)
- parts.reverse()
- return sep.join(parts)
- npath, last = os.path.split(npath)
- parts.append(last)
- else:
- return path
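- # Illustrative sketch: with a hypothetical basedir of
- # '/usr/lib/python2.7/site-packages',
- # make_relative('/usr/lib/python2.7/site-packages/Foo-1.0.egg') returns
- # './Foo-1.0.egg', whereas a path outside basedir is returned unchanged.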
-
-
-def _first_line_re():
- """
- Return a regular expression based on first_line_re suitable for matching
- strings.
- """
- if isinstance(first_line_re.pattern, str):
- return first_line_re
-
- # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
- return re.compile(first_line_re.pattern.decode())
-
-
-def auto_chmod(func, arg, exc):
- if func is os.remove and IS_WINDOWS:
- chmod(arg, stat.S_IWRITE)
- return func(arg)
- et, ev, _ = sys.exc_info()
- reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
-
-
-def update_dist_caches(dist_path, fix_zipimporter_caches):
- """
- Fix any globally cached `dist_path` related data
-
- `dist_path` should be a path of a newly installed egg distribution (zipped
- or unzipped).
-
- sys.path_importer_cache contains finder objects that have been cached when
- importing data from the original distribution. Any such finders need to be
- cleared since the replacement distribution might be packaged differently,
- e.g. a zipped egg distribution might get replaced with an unzipped egg
- folder or vice versa. Having the old finders cached may then cause Python
- to attempt loading modules from the replacement distribution using an
- incorrect loader.
-
- zipimport.zipimporter objects are Python loaders charged with importing
- data packaged inside zip archives. If stale loaders referencing the
- original distribution are left behind, they can fail to load modules from
- the replacement distribution. E.g. if an old zipimport.zipimporter instance
- is used to load data from a new zipped egg archive, it may cause the
- operation to attempt to locate the requested data in the wrong location -
- one indicated by the original distribution's zip archive directory
- information. Such an operation may then fail outright, e.g. report having
- read a 'bad local file header', or even worse, it may fail silently &
- return invalid data.
-
- zipimport._zip_directory_cache contains cached zip archive directory
- information for all existing zipimport.zipimporter instances and all such
- instances connected to the same archive share the same cached directory
- information.
-
- If asked, and the underlying Python implementation allows it, we can fix
- all existing zipimport.zipimporter instances instead of having to track
- them down and remove them one by one, by updating their shared cached zip
- archive directory information. This, of course, assumes that the
- replacement distribution is packaged as a zipped egg.
-
- If not asked to fix existing zipimport.zipimporter instances, we still do
- our best to clear any remaining zipimport.zipimporter related cached data
- that might somehow later get used when attempting to load data from the new
- distribution and thus cause such load operations to fail. Note that when
- tracking down such remaining stale data, we cannot catch every conceivable
- usage from here, and we clear only those that we know of and have found to
- cause problems if left alive. Any remaining caches should be updated by
- whoever is in charge of maintaining them, i.e. they should be ready to
- handle us replacing their zip archives with new distributions at runtime.
-
- """
- # There are several other known sources of stale zipimport.zipimporter
- # instances that we do not clear here, but might if ever given a reason to
- # do so:
- # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
- # set') may contain distributions which may in turn contain their
- # zipimport.zipimporter loaders.
- # * Several zipimport.zipimporter loaders held by local variables further
- # up the function call stack when running the setuptools installation.
- # * Already loaded modules may have their __loader__ attribute set to the
- # exact loader instance used when importing them. Python 3.4 docs state
- # that this information is intended mostly for introspection and so is
- # not expected to cause us problems.
- normalized_path = normalize_path(dist_path)
- _uncache(normalized_path, sys.path_importer_cache)
- if fix_zipimporter_caches:
- _replace_zip_directory_cache_data(normalized_path)
- else:
- # Here, even though we do not want to fix existing and now stale
- # zipimporter cache information, we still want to remove it. Related to
- # Python's zip archive directory information cache, we clear each of
- # its stale entries in two phases:
- # 1. Clear the entry so attempting to access zip archive information
- # via any existing stale zipimport.zipimporter instances fails.
- # 2. Remove the entry from the cache so any newly constructed
- # zipimport.zipimporter instances do not end up using old stale
- # zip archive directory information.
- # This whole stale data removal step does not seem strictly necessary,
- # but has been left in because it was done before we started replacing
- # the zip archive directory information cache content if possible, and
- # there are no relevant unit tests that we can depend on to tell us if
- # this is really needed.
- _remove_and_clear_zip_directory_cache_data(normalized_path)
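-# Illustrative usage (mirroring install_egg() above): after moving a zipped
-# egg into place, call
-#   update_dist_caches(destination, fix_zipimporter_caches=True)
-# to rebuild stale zipimporter data; pass fix_zipimporter_caches=False when
-# the cached data should only be cleared, not repopulated.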
-
-
-def _collect_zipimporter_cache_entries(normalized_path, cache):
- """
- Return zipimporter cache entry keys related to a given normalized path.
-
- Alternative path spellings (e.g. those using different character case or
- those using alternative path separators) related to the same path are
- included. Any sub-path entries are included as well, i.e. those
- corresponding to zip archives embedded in other zip archives.
-
- """
- result = []
- prefix_len = len(normalized_path)
- for p in cache:
- np = normalize_path(p)
- if (np.startswith(normalized_path) and
- np[prefix_len:prefix_len + 1] in (os.sep, '')):
- result.append(p)
- return result
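-# e.g. for a hypothetical normalized_path of '/x/dist.egg', cache keys such as
-# '/x/dist.egg' and '/x/dist.egg/inner.zip' are collected, while
-# '/x/dist.egg-extra' is not, because the character following the prefix must
-# be os.sep or the end of the string.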
-
-
-def _update_zipimporter_cache(normalized_path, cache, updater=None):
- """
- Update zipimporter cache data for a given normalized path.
-
- Any sub-path entries are processed as well, i.e. those corresponding to zip
- archives embedded in other zip archives.
-
- The given updater is a callable taking a cache entry key and the original
- entry (after the entry has already been removed from the cache); it is
- expected to update the entry and possibly return a new one to be inserted
- in its place.
- Returning None indicates that the entry should not be replaced with a new
- one. If no updater is given, the cache entries are simply removed without
- any additional processing, the same as if the updater simply returned None.
-
- """
- for p in _collect_zipimporter_cache_entries(normalized_path, cache):
- # N.B. pypy's custom zipimport._zip_directory_cache implementation does
- # not support the complete dict interface:
- # * Does not support item assignment, thus not allowing this function
- # to be used only for removing existing cache entries.
- # * Does not support the dict.pop() method, forcing us to use the
- # get/del patterns instead. For more detailed information see the
- # following links:
- # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960
- # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
- old_entry = cache[p]
- del cache[p]
- new_entry = updater and updater(p, old_entry)
- if new_entry is not None:
- cache[p] = new_entry
-
-
-def _uncache(normalized_path, cache):
- _update_zipimporter_cache(normalized_path, cache)
-
-
-def _remove_and_clear_zip_directory_cache_data(normalized_path):
- def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
- old_entry.clear()
-
- _update_zipimporter_cache(
- normalized_path, zipimport._zip_directory_cache,
- updater=clear_and_remove_cached_zip_archive_directory_data)
-
-# The PyPy Python implementation does not allow directly writing to the
-# zipimport._zip_directory_cache and so prevents us from attempting to correct
-# its content. The best we can do there is clear the problematic cache content
-# and have PyPy repopulate it as needed. The downside is that if there are any
-# stale zipimport.zipimporter instances lying around, attempting to use them
-# will fail due to not having their zip archive directory information available
-# instead of being automatically corrected to use the new correct zip archive
-# directory information.
-if '__pypy__' in sys.builtin_module_names:
- _replace_zip_directory_cache_data = \
- _remove_and_clear_zip_directory_cache_data
-else:
- def _replace_zip_directory_cache_data(normalized_path):
- def replace_cached_zip_archive_directory_data(path, old_entry):
- # N.B. In theory, we could load the zip directory information just
- # once for all updated path spellings, and then copy it locally and
- # update its contained path strings to contain the correct
- # spelling, but that seems like a way too invasive move (this cache
- # structure is not officially documented anywhere and could in
- # theory change with new Python releases) for no significant
- # benefit.
- old_entry.clear()
- zipimport.zipimporter(path)
- old_entry.update(zipimport._zip_directory_cache[path])
- return old_entry
-
- _update_zipimporter_cache(
- normalized_path, zipimport._zip_directory_cache,
- updater=replace_cached_zip_archive_directory_data)
-
-
-def is_python(text, filename='<string>'):
- "Is this string a valid Python script?"
- try:
- compile(text, filename, 'exec')
- except (SyntaxError, TypeError):
- return False
- else:
- return True
-
-
-def is_sh(executable):
- """Determine if the specified executable is a .sh (contains a #! line)"""
- try:
- with io.open(executable, encoding='latin-1') as fp:
- magic = fp.read(2)
- except (OSError, IOError):
- return executable
- return magic == '#!'
-
-
-def nt_quote_arg(arg):
- """Quote a command line argument according to Windows parsing rules"""
- return subprocess.list2cmdline([arg])
-
-
-def is_python_script(script_text, filename):
- """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
- """
- if filename.endswith('.py') or filename.endswith('.pyw'):
- return True # extension says it's Python
- if is_python(script_text, filename):
- return True # it's syntactically valid Python
- if script_text.startswith('#!'):
- # It begins with a '#!' line, so check if 'python' is in it somewhere
- return 'python' in script_text.splitlines()[0].lower()
-
- return False # Not any Python I can recognize
-
-
-try:
- from os import chmod as _chmod
-except ImportError:
- # Jython compatibility
- def _chmod(*args):
- pass
-
-
-def chmod(path, mode):
- log.debug("changing mode of %s to %o", path, mode)
- try:
- _chmod(path, mode)
- except os.error as e:
- log.debug("chmod failed: %s", e)
-
-
-def fix_jython_executable(executable, options):
- warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2)
-
- if not JythonCommandSpec.relevant():
- return executable
-
- cmd = CommandSpec.best().from_param(executable)
- cmd.install_options(options)
- return cmd.as_header().lstrip('#!').rstrip('\n')
-
-
-class CommandSpec(list):
- """
- A command spec for a #! header, specified as a list of arguments akin to
- those passed to Popen.
- """
-
- options = []
- split_args = dict()
-
- @classmethod
- def best(cls):
- """
- Choose the best CommandSpec class based on environmental conditions.
- """
- return cls if not JythonCommandSpec.relevant() else JythonCommandSpec
-
- @classmethod
- def _sys_executable(cls):
- _default = os.path.normpath(sys.executable)
- return os.environ.get('__PYVENV_LAUNCHER__', _default)
-
- @classmethod
- def from_param(cls, param):
- """
- Construct a CommandSpec from a parameter to build_scripts, which may
- be None.
- """
- if isinstance(param, cls):
- return param
- if isinstance(param, list):
- return cls(param)
- if param is None:
- return cls.from_environment()
- # otherwise, assume it's a string.
- return cls.from_string(param)
-
- @classmethod
- def from_environment(cls):
- return cls([cls._sys_executable()])
-
- @classmethod
- def from_string(cls, string):
- """
- Construct a command spec from a simple string representing a command
- line parseable by shlex.split.
- """
- items = shlex.split(string, **cls.split_args)
- return cls(items)
-
- def install_options(self, script_text):
- self.options = shlex.split(self._extract_options(script_text))
- cmdline = subprocess.list2cmdline(self)
- if not isascii(cmdline):
- self.options[:0] = ['-x']
-
- @staticmethod
- def _extract_options(orig_script):
- """
- Extract any options from the first line of the script.
- """
- first = (orig_script + '\n').splitlines()[0]
- match = _first_line_re().match(first)
- options = match.group(1) or '' if match else ''
- return options.strip()
-
- def as_header(self):
- return self._render(self + list(self.options))
-
- @staticmethod
- def _render(items):
- cmdline = subprocess.list2cmdline(items)
- return '#!' + cmdline + '\n'
-
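-# Illustrative usage: CommandSpec.from_string('/usr/bin/python -O').as_header()
-# returns '#!/usr/bin/python -O\n' (the interpreter path is hypothetical);
-# from_param(None) falls back to the current interpreter via
-# from_environment().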
-# For pbr compat; will be removed in a future version.
-sys_executable = CommandSpec._sys_executable()
-
-
-class WindowsCommandSpec(CommandSpec):
- split_args = dict(posix=False)
-
-
-class JythonCommandSpec(CommandSpec):
- @classmethod
- def relevant(cls):
- return (
- sys.platform.startswith('java')
- and
- __import__('java').lang.System.getProperty('os.name') != 'Linux'
- )
-
- @classmethod
- def from_string(cls, string):
- return cls([string])
-
- def as_header(self):
- """
- Workaround Jython's sys.executable being a .sh (an invalid
- shebang line interpreter)
- """
- if not is_sh(self[0]):
- return super(JythonCommandSpec, self).as_header()
-
- if self.options:
- # Can't apply the workaround, leave it broken
- log.warn(
- "WARNING: Unable to adapt shebang line for Jython,"
- " the following script is NOT executable\n"
- " see http://bugs.jython.org/issue1112 for"
- " more information.")
- return super(JythonCommandSpec, self).as_header()
-
- items = ['/usr/bin/env'] + self + list(self.options)
- return self._render(items)
-
-
-class ScriptWriter(object):
- """
- Encapsulates behavior around writing entry point scripts for console and
- gui apps.
- """
-
- template = textwrap.dedent("""
- # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
- __requires__ = %(spec)r
- import sys
- from pkg_resources import load_entry_point
-
- if __name__ == '__main__':
- sys.exit(
- load_entry_point(%(spec)r, %(group)r, %(name)r)()
- )
- """).lstrip()
-
- command_spec_class = CommandSpec
-
- @classmethod
- def get_script_args(cls, dist, executable=None, wininst=False):
- # for backward compatibility
- warnings.warn("Use get_args", DeprecationWarning)
- writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
- header = cls.get_script_header("", executable, wininst)
- return writer.get_args(dist, header)
-
- @classmethod
- def get_script_header(cls, script_text, executable=None, wininst=False):
- # for backward compatibility
- warnings.warn("Use get_header", DeprecationWarning)
- if wininst:
- executable = "python.exe"
- cmd = cls.command_spec_class.best().from_param(executable)
- cmd.install_options(script_text)
- return cmd.as_header()
-
- @classmethod
- def get_args(cls, dist, header=None):
- """
- Yield write_script() argument tuples for a distribution's entrypoints
- """
- if header is None:
- header = cls.get_header()
- spec = str(dist.as_requirement())
- for type_ in 'console', 'gui':
- group = type_ + '_scripts'
- for name, ep in dist.get_entry_map(group).items():
- script_text = cls.template % locals()
- for res in cls._get_script_args(type_, name, header,
- script_text):
- yield res
-
- @classmethod
- def get_writer(cls, force_windows):
- # for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
- return WindowsScriptWriter.best() if force_windows else cls.best()
-
- @classmethod
- def best(cls):
- """
- Select the best ScriptWriter for this environment.
- """
- return WindowsScriptWriter.best() if IS_WINDOWS else cls
-
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- # Simply write the stub with no extension.
- yield (name, header + script_text)
-
- @classmethod
- def get_header(cls, script_text="", executable=None):
- """Create a #! line, getting options (if any) from script_text"""
- cmd = cls.command_spec_class.best().from_param(executable)
- cmd.install_options(script_text)
- return cmd.as_header()
-
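-# Illustrative usage: for a distribution exposing a hypothetical
-# console_scripts entry point 'foo = foo.cli:main',
-# ScriptWriter.best().get_args(dist) yields ('foo', header + script_text)
-# style tuples on non-Windows platforms, where script_text is the template
-# above filled in with the requirement spec, the group and the entry point
-# name.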
-
-class WindowsScriptWriter(ScriptWriter):
- command_spec_class = WindowsCommandSpec
-
- @classmethod
- def get_writer(cls):
- # for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
- return cls.best()
-
- @classmethod
- def best(cls):
- """
- Select the best ScriptWriter suitable for Windows
- """
- writer_lookup = dict(
- executable=WindowsExecutableLauncherWriter,
- natural=cls,
- )
- # for compatibility, use the executable launcher by default
- launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
- return writer_lookup[launcher]
-
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- "For Windows, add a .py extension"
- ext = dict(console='.pya', gui='.pyw')[type_]
- if ext not in os.environ['PATHEXT'].lower().split(';'):
- warnings.warn("%s not listed in PATHEXT; scripts will not be "
- "recognized as executables." % ext, UserWarning)
- old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
- old.remove(ext)
- header = cls._adjust_header(type_, header)
- blockers = [name + x for x in old]
- yield name + ext, header + script_text, 't', blockers
-
- @staticmethod
- def _adjust_header(type_, orig_header):
- """
- Make sure 'pythonw' is used for gui and 'python' is used for
- console (regardless of what sys.executable is).
- """
- pattern = 'pythonw.exe'
- repl = 'python.exe'
- if type_ == 'gui':
- pattern, repl = repl, pattern
- pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
- new_header = pattern_ob.sub(string=orig_header, repl=repl)
- clean_header = new_header[2:-1].strip('"')
- if sys.platform == 'win32' and not os.path.exists(clean_header):
- # the adjusted version doesn't exist, so return the original
- return orig_header
- return new_header
-
-
-class WindowsExecutableLauncherWriter(WindowsScriptWriter):
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- """
- For Windows, add a .py extension and an .exe launcher
- """
- if type_ == 'gui':
- launcher_type = 'gui'
- ext = '-script.pyw'
- old = ['.pyw']
- else:
- launcher_type = 'cli'
- ext = '-script.py'
- old = ['.py', '.pyc', '.pyo']
- hdr = cls._adjust_header(type_, header)
- blockers = [name + x for x in old]
- yield (name + ext, hdr + script_text, 't', blockers)
- yield (
- name + '.exe', get_win_launcher(launcher_type),
- 'b' # write in binary mode
- )
- if not is_64bit():
- # install a manifest for the launcher to prevent Windows
- # from detecting it as an installer (which it will for
- # launchers like easy_install.exe). Consider only
- # adding a manifest for launchers detected as installers.
- # See Distribute #143 for details.
- m_name = name + '.exe.manifest'
- yield (m_name, load_launcher_manifest(name), 't')
-
-
-# for backward-compatibility
-get_script_args = ScriptWriter.get_script_args
-get_script_header = ScriptWriter.get_script_header
-
-
-def get_win_launcher(type):
- """
- Load the Windows launcher (executable) suitable for launching a script.
-
- `type` should be either 'cli' or 'gui'
-
- Returns the executable as a byte string.
- """
- launcher_fn = '%s.exe' % type
- if platform.machine().lower() == 'arm':
- launcher_fn = launcher_fn.replace(".", "-arm.")
- if is_64bit():
- launcher_fn = launcher_fn.replace(".", "-64.")
- else:
- launcher_fn = launcher_fn.replace(".", "-32.")
- return resource_string('setuptools', launcher_fn)
-
-
-def load_launcher_manifest(name):
- manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
- if PY2:
- return manifest % vars()
- else:
- return manifest.decode('utf-8') % vars()
-
-
-def rmtree(path, ignore_errors=False, onerror=auto_chmod):
- """Recursively delete a directory tree.
-
- This code is taken from the Python 2.4 version of 'shutil', because
- the 2.3 version doesn't really work right.
- """
- if ignore_errors:
- def onerror(*args):
- pass
- elif onerror is None:
- def onerror(*args):
- raise
- names = []
- try:
- names = os.listdir(path)
- except os.error:
- onerror(os.listdir, path, sys.exc_info())
- for name in names:
- fullname = os.path.join(path, name)
- try:
- mode = os.lstat(fullname).st_mode
- except os.error:
- mode = 0
- if stat.S_ISDIR(mode):
- rmtree(fullname, ignore_errors, onerror)
- else:
- try:
- os.remove(fullname)
- except os.error:
- onerror(os.remove, fullname, sys.exc_info())
- try:
- os.rmdir(path)
- except os.error:
- onerror(os.rmdir, path, sys.exc_info())
-
-
-def current_umask():
- tmp = os.umask(0o022)
- os.umask(tmp)
- return tmp
-
-
-def bootstrap():
- # This function is called when setuptools*.egg is run using /bin/sh
- import setuptools
-
- argv0 = os.path.dirname(setuptools.__path__[0])
- sys.argv[0] = argv0
- sys.argv.append(argv0)
- main()
-
-
-def main(argv=None, **kw):
- from setuptools import setup
- from setuptools.dist import Distribution
-
- class DistributionWithoutHelpCommands(Distribution):
- common_usage = ""
-
- def _show_help(self, *args, **kw):
- with _patch_usage():
- Distribution._show_help(self, *args, **kw)
-
- if argv is None:
- argv = sys.argv[1:]
-
- with _patch_usage():
- setup(
- script_args=['-q', 'easy_install', '-v'] + argv,
- script_name=sys.argv[0] or 'easy_install',
- distclass=DistributionWithoutHelpCommands, **kw
- )
-
-
-@contextlib.contextmanager
-def _patch_usage():
- import distutils.core
- USAGE = textwrap.dedent("""
- usage: %(script)s [options] requirement_or_url ...
- or: %(script)s --help
- """).lstrip()
-
- def gen_usage(script_name):
- return USAGE % dict(
- script=os.path.basename(script_name),
- )
-
- saved = distutils.core.gen_usage
- distutils.core.gen_usage = gen_usage
- try:
- yield
- finally:
- distutils.core.gen_usage = saved
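For reference, this is roughly what ScriptWriter.get_args() emits for a hypothetical distribution "example-pkg 1.0" exposing a console_scripts entry point named "hello" (the names and the #! header are illustrative, not taken from this repository); the body is just the template above with spec/group/name substituted:

    #!/usr/bin/env python
    # EASY-INSTALL-ENTRY-SCRIPT: 'example-pkg==1.0','console_scripts','hello'
    __requires__ = 'example-pkg==1.0'
    import sys
    from pkg_resources import load_entry_point

    if __name__ == '__main__':
        sys.exit(
            load_entry_point('example-pkg==1.0', 'console_scripts', 'hello')()
        )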
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py
deleted file mode 100644
index 50f3d5c..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/egg_info.py
+++ /dev/null
@@ -1,480 +0,0 @@
-"""setuptools.command.egg_info
-
-Create a distribution's .egg-info directory and contents"""
-
-from distutils.filelist import FileList as _FileList
-from distutils.util import convert_path
-from distutils import log
-import distutils.errors
-import distutils.filelist
-import os
-import re
-import sys
-
-try:
- from setuptools_svn import svn_utils
-except ImportError:
- pass
-
-from setuptools import Command
-from setuptools.command.sdist import sdist
-from setuptools.compat import basestring, PY3, StringIO
-from setuptools.command.sdist import walk_revctrl
-from pkg_resources import (
- parse_requirements, safe_name, parse_version,
- safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
-import setuptools.unicode_utils as unicode_utils
-
-from pkg_resources import packaging
-
-class egg_info(Command):
- description = "create a distribution's .egg-info directory"
-
- user_options = [
- ('egg-base=', 'e', "directory containing .egg-info directories"
- " (default: top of the source tree)"),
- ('tag-svn-revision', 'r',
- "Add subversion revision ID to version number"),
- ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
- ('tag-build=', 'b', "Specify explicit tag to add to version number"),
- ('no-svn-revision', 'R',
- "Don't add subversion revision ID [default]"),
- ('no-date', 'D', "Don't include date stamp [default]"),
- ]
-
- boolean_options = ['tag-date', 'tag-svn-revision']
- negative_opt = {'no-svn-revision': 'tag-svn-revision',
- 'no-date': 'tag-date'}
-
- def initialize_options(self):
- self.egg_name = None
- self.egg_version = None
- self.egg_base = None
- self.egg_info = None
- self.tag_build = None
- self.tag_svn_revision = 0
- self.tag_date = 0
- self.broken_egg_info = False
- self.vtags = None
-
- def save_version_info(self, filename):
- from setuptools.command.setopt import edit_config
-
- values = dict(
- egg_info=dict(
- tag_svn_revision=0,
- tag_date=0,
- tag_build=self.tags(),
- )
- )
- edit_config(filename, values)
-
- def finalize_options(self):
- self.egg_name = safe_name(self.distribution.get_name())
- self.vtags = self.tags()
- self.egg_version = self.tagged_version()
-
- parsed_version = parse_version(self.egg_version)
-
- try:
- is_version = isinstance(parsed_version, packaging.version.Version)
- spec = (
- "%s==%s" if is_version else "%s===%s"
- )
- list(
- parse_requirements(spec % (self.egg_name, self.egg_version))
- )
- except ValueError:
- raise distutils.errors.DistutilsOptionError(
- "Invalid distribution name or version syntax: %s-%s" %
- (self.egg_name, self.egg_version)
- )
-
- if self.egg_base is None:
- dirs = self.distribution.package_dir
- self.egg_base = (dirs or {}).get('', os.curdir)
-
- self.ensure_dirname('egg_base')
- self.egg_info = to_filename(self.egg_name) + '.egg-info'
- if self.egg_base != os.curdir:
- self.egg_info = os.path.join(self.egg_base, self.egg_info)
- if '-' in self.egg_name:
- self.check_broken_egg_info()
-
- # Set package version for the benefit of dumber commands
- # (e.g. sdist, bdist_wininst, etc.)
- #
- self.distribution.metadata.version = self.egg_version
-
- # If we bootstrapped around the lack of a PKG-INFO, as might be the
- # case in a fresh checkout, make sure that any special tags get added
- # to the version info
- #
- pd = self.distribution._patched_dist
- if pd is not None and pd.key == self.egg_name.lower():
- pd._version = self.egg_version
- pd._parsed_version = parse_version(self.egg_version)
- self.distribution._patched_dist = None
-
- def write_or_delete_file(self, what, filename, data, force=False):
- """Write `data` to `filename` or delete if empty
-
- If `data` is non-empty, this routine is the same as ``write_file()``.
- If `data` is empty but not ``None``, this is the same as calling
- ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
- unless `filename` exists, in which case the file is deleted if `force`
- is true, or a warning about the orphaned file is issued if `force` is false.
- """
- if data:
- self.write_file(what, filename, data)
- elif os.path.exists(filename):
- if data is None and not force:
- log.warn(
- "%s not set in setup(), but %s exists", what, filename
- )
- return
- else:
- self.delete_file(filename)
-
- def write_file(self, what, filename, data):
- """Write `data` to `filename` (if not a dry run) after announcing it
-
- `what` is used in a log message to identify what is being written
- to the file.
- """
- log.info("writing %s to %s", what, filename)
- if PY3:
- data = data.encode("utf-8")
- if not self.dry_run:
- f = open(filename, 'wb')
- f.write(data)
- f.close()
-
- def delete_file(self, filename):
- """Delete `filename` (if not a dry run) after announcing it"""
- log.info("deleting %s", filename)
- if not self.dry_run:
- os.unlink(filename)
-
- def tagged_version(self):
- version = self.distribution.get_version()
- # egg_info may be called more than once for a distribution,
- # in which case the version string already contains all tags.
- if self.vtags and version.endswith(self.vtags):
- return safe_version(version)
- return safe_version(version + self.vtags)
-
- def run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in iter_entry_points('egg_info.writers'):
- ep.require(installer=installer)
- writer = ep.resolve()
- writer(self, ep.name, os.path.join(self.egg_info, ep.name))
-
- # Get rid of native_libs.txt if it was put there by older bdist_egg
- nl = os.path.join(self.egg_info, "native_libs.txt")
- if os.path.exists(nl):
- self.delete_file(nl)
-
- self.find_sources()
-
- def tags(self):
- version = ''
- if self.tag_build:
- version += self.tag_build
- if self.tag_svn_revision:
- rev = self.get_svn_revision()
- if rev: # is 0 if it's not an svn working copy
- version += '-r%s' % rev
- if self.tag_date:
- import time
-
- version += time.strftime("-%Y%m%d")
- return version
-
- @staticmethod
- def get_svn_revision():
- if 'svn_utils' not in globals():
- return "0"
- return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
-
- def find_sources(self):
- """Generate SOURCES.txt manifest file"""
- manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
- mm = manifest_maker(self.distribution)
- mm.manifest = manifest_filename
- mm.run()
- self.filelist = mm.filelist
-
- def check_broken_egg_info(self):
- bei = self.egg_name + '.egg-info'
- if self.egg_base != os.curdir:
- bei = os.path.join(self.egg_base, bei)
- if os.path.exists(bei):
- log.warn(
- "-" * 78 + '\n'
- "Note: Your current .egg-info directory has a '-' in its name;"
- '\nthis will not work correctly with "setup.py develop".\n\n'
- 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
- bei, self.egg_info
- )
- self.broken_egg_info = self.egg_info
- self.egg_info = bei # make it work for now
-
-
-class FileList(_FileList):
- """File list that accepts only existing, platform-independent paths"""
-
- def append(self, item):
- if item.endswith('\r'): # Fix older sdists built on Windows
- item = item[:-1]
- path = convert_path(item)
-
- if self._safe_path(path):
- self.files.append(path)
-
- def extend(self, paths):
- self.files.extend(filter(self._safe_path, paths))
-
- def _repair(self):
- """
- Replace self.files with only safe paths
-
- Because some owners of FileList manipulate the underlying
- ``files`` attribute directly, this method must be called to
- repair those paths.
- """
- self.files = list(filter(self._safe_path, self.files))
-
- def _safe_path(self, path):
- enc_warn = "'%s' not %s encodable -- skipping"
-
- # To avoid accidental transcoding errors, first decode to unicode
- u_path = unicode_utils.filesys_decode(path)
- if u_path is None:
- log.warn("'%s' in unexpected encoding -- skipping" % path)
- return False
-
- # Must ensure utf-8 encodability
- utf8_path = unicode_utils.try_encode(u_path, "utf-8")
- if utf8_path is None:
- log.warn(enc_warn, path, 'utf-8')
- return False
-
- try:
- # accept if either way checks out
- if os.path.exists(u_path) or os.path.exists(utf8_path):
- return True
- # this will catch any encode errors decoding u_path
- except UnicodeEncodeError:
- log.warn(enc_warn, path, sys.getfilesystemencoding())
-
-
-class manifest_maker(sdist):
- template = "MANIFEST.in"
-
- def initialize_options(self):
- self.use_defaults = 1
- self.prune = 1
- self.manifest_only = 1
- self.force_manifest = 1
-
- def finalize_options(self):
- pass
-
- def run(self):
- self.filelist = FileList()
- if not os.path.exists(self.manifest):
- self.write_manifest() # it must exist so it'll get in the list
- self.filelist.findall()
- self.add_defaults()
- if os.path.exists(self.template):
- self.read_template()
- self.prune_file_list()
- self.filelist.sort()
- self.filelist.remove_duplicates()
- self.write_manifest()
-
- def _manifest_normalize(self, path):
- path = unicode_utils.filesys_decode(path)
- return path.replace(os.sep, '/')
-
- def write_manifest(self):
- """
- Write the file list in 'self.filelist' to the manifest file
- named by 'self.manifest'.
- """
- self.filelist._repair()
-
- # _repair() above ensured encodability; now normalize paths for the manifest
- files = [self._manifest_normalize(f) for f in self.filelist.files]
- msg = "writing manifest file '%s'" % self.manifest
- self.execute(write_file, (self.manifest, files), msg)
-
- def warn(self, msg): # suppress missing-file warnings from sdist
- if not msg.startswith("standard file not found:"):
- sdist.warn(self, msg)
-
- def add_defaults(self):
- sdist.add_defaults(self)
- self.filelist.append(self.template)
- self.filelist.append(self.manifest)
- rcfiles = list(walk_revctrl())
- if rcfiles:
- self.filelist.extend(rcfiles)
- elif os.path.exists(self.manifest):
- self.read_manifest()
- ei_cmd = self.get_finalized_command('egg_info')
- self._add_egg_info(cmd=ei_cmd)
- self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
- def _add_egg_info(self, cmd):
- """
- Add paths for egg-info files for an external egg-base.
-
- The egg-info files are written to egg-base. If egg-base is
- outside the current working directory, this method
- searches the egg-base directory for files to include
- in the manifest. Uses distutils.filelist.findall (which is
- really the version monkeypatched in by setuptools/__init__.py)
- to perform the search.
-
- Since findall records relative paths, prefix the returned
- paths with cmd.egg_base, so add_defaults' include_pattern call
- (which is looking for the absolute cmd.egg_info) will match
- them.
- """
- if cmd.egg_base == os.curdir:
- # egg-info files were already added by something else
- return
-
- discovered = distutils.filelist.findall(cmd.egg_base)
- resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
- self.filelist.allfiles.extend(resolved)
-
- def prune_file_list(self):
- build = self.get_finalized_command('build')
- base_dir = self.distribution.get_fullname()
- self.filelist.exclude_pattern(None, prefix=build.build_base)
- self.filelist.exclude_pattern(None, prefix=base_dir)
- sep = re.escape(os.sep)
- self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
- is_regex=1)
-
-
-def write_file(filename, contents):
- """Create a file with the specified name and write 'contents' (a
- sequence of strings without line terminators) to it.
- """
- contents = "\n".join(contents)
-
- # assuming the contents have been vetted for utf-8 encoding
- contents = contents.encode("utf-8")
-
- with open(filename, "wb") as f: # always write POSIX-style manifest
- f.write(contents)
-
-
-def write_pkg_info(cmd, basename, filename):
- log.info("writing %s", filename)
- if not cmd.dry_run:
- metadata = cmd.distribution.metadata
- metadata.version, oldver = cmd.egg_version, metadata.version
- metadata.name, oldname = cmd.egg_name, metadata.name
- try:
- # write unescaped data to PKG-INFO, so older pkg_resources
- # can still parse it
- metadata.write_pkg_info(cmd.egg_info)
- finally:
- metadata.name, metadata.version = oldname, oldver
-
- safe = getattr(cmd.distribution, 'zip_safe', None)
- from setuptools.command import bdist_egg
-
- bdist_egg.write_safety_flag(cmd.egg_info, safe)
-
-
-def warn_depends_obsolete(cmd, basename, filename):
- if os.path.exists(filename):
- log.warn(
- "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
-
-def _write_requirements(stream, reqs):
- lines = yield_lines(reqs or ())
- append_cr = lambda line: line + '\n'
- lines = map(append_cr, lines)
- stream.writelines(lines)
-
-
-def write_requirements(cmd, basename, filename):
- dist = cmd.distribution
- data = StringIO()
- _write_requirements(data, dist.install_requires)
- extras_require = dist.extras_require or {}
- for extra in sorted(extras_require):
- data.write('\n[{extra}]\n'.format(**vars()))
- _write_requirements(data, extras_require[extra])
- cmd.write_or_delete_file("requirements", filename, data.getvalue())
-
-
-def write_setup_requirements(cmd, basename, filename):
- data = StringIO()
- _write_requirements(data, cmd.distribution.setup_requires)
- cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
-
-
-def write_toplevel_names(cmd, basename, filename):
- pkgs = dict.fromkeys(
- [
- k.split('.', 1)[0]
- for k in cmd.distribution.iter_distribution_names()
- ]
- )
- cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
-
-
-def overwrite_arg(cmd, basename, filename):
- write_arg(cmd, basename, filename, True)
-
-
-def write_arg(cmd, basename, filename, force=False):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value) + '\n'
- cmd.write_or_delete_file(argname, filename, value, force)
-
-
-def write_entries(cmd, basename, filename):
- ep = cmd.distribution.entry_points
-
- if isinstance(ep, basestring) or ep is None:
- data = ep
- elif ep is not None:
- data = []
- for section, contents in sorted(ep.items()):
- if not isinstance(contents, basestring):
- contents = EntryPoint.parse_group(section, contents)
- contents = '\n'.join(sorted(map(str, contents.values())))
- data.append('[%s]\n%s\n\n' % (section, contents))
- data = ''.join(data)
-
- cmd.write_or_delete_file('entry points', filename, data, True)
-
-
-def get_pkg_info_revision():
- # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
- # a subversion revision
- #
- if os.path.exists('PKG-INFO'):
- f = open('PKG-INFO', 'rU')
- for line in f:
- match = re.match(r"Version:.*-r(\d+)\s*$", line)
- if match:
- return int(match.group(1))
- f.close()
- return 0
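egg_info.run() above drives pluggable writers registered under the 'egg_info.writers' entry-point group, calling each one as writer(cmd, ep.name, os.path.join(cmd.egg_info, ep.name)). A minimal sketch of such a writer, with a hypothetical name and registration that are not part of this codebase:

    # Would be registered by a plugin's setup.py as, e.g.
    #   entry_points={'egg_info.writers': ['notes.txt = examplepkg.writers:write_notes']}
    def write_notes(cmd, basename, filename):
        # cmd is the running egg_info command; write_or_delete_file()
        # writes the data, or removes a stale file when the data is empty.
        data = getattr(cmd.distribution, 'notes', None)
        cmd.write_or_delete_file("notes", filename, data)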
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py
deleted file mode 100644
index d2bca2e..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install.py
+++ /dev/null
@@ -1,125 +0,0 @@
-from distutils.errors import DistutilsArgError
-import inspect
-import glob
-import warnings
-import platform
-import distutils.command.install as orig
-
-import setuptools
-
-# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
-# now. See https://bitbucket.org/pypa/setuptools/issue/199/
-_install = orig.install
-
-
-class install(orig.install):
- """Use easy_install to install the package, w/dependencies"""
-
- user_options = orig.install.user_options + [
- ('old-and-unmanageable', None, "Try not to use this!"),
- ('single-version-externally-managed', None,
- "used by system package builders to create 'flat' eggs"),
- ]
- boolean_options = orig.install.boolean_options + [
- 'old-and-unmanageable', 'single-version-externally-managed',
- ]
- new_commands = [
- ('install_egg_info', lambda self: True),
- ('install_scripts', lambda self: True),
- ]
- _nc = dict(new_commands)
-
- def initialize_options(self):
- orig.install.initialize_options(self)
- self.old_and_unmanageable = None
- self.single_version_externally_managed = None
-
- def finalize_options(self):
- orig.install.finalize_options(self)
- if self.root:
- self.single_version_externally_managed = True
- elif self.single_version_externally_managed:
- if not self.root and not self.record:
- raise DistutilsArgError(
- "You must specify --record or --root when building system"
- " packages"
- )
-
- def handle_extra_path(self):
- if self.root or self.single_version_externally_managed:
- # explicit backward-compatibility mode, allow extra_path to work
- return orig.install.handle_extra_path(self)
-
- # Ignore extra_path when installing an egg (or being run by another
- # command without --root or --single-version-externally-managed)
- self.path_file = None
- self.extra_dirs = ''
-
- def run(self):
- # Explicit request for old-style install? Just do it
- if self.old_and_unmanageable or self.single_version_externally_managed:
- return orig.install.run(self)
-
- if not self._called_from_setup(inspect.currentframe()):
- # Run in backward-compatibility mode to support bdist_* commands.
- orig.install.run(self)
- else:
- self.do_egg_install()
-
- @staticmethod
- def _called_from_setup(run_frame):
- """
- Attempt to detect whether run() was called from setup() or by another
- command. If called by setup(), the parent caller will be the
- 'run_command' method in 'distutils.dist', and *its* caller will be
- the 'run_commands' method. If called any other way, the
- immediate caller *might* be 'run_command', but it won't have been
- called by 'run_commands'. Return True in that case or if a call stack
- is unavailable. Return False otherwise.
- """
- if run_frame is None:
- msg = "Call stack not available. bdist_* commands may fail."
- warnings.warn(msg)
- if platform.python_implementation() == 'IronPython':
- msg = "For best results, pass -X:Frames to enable call stack."
- warnings.warn(msg)
- return True
- res = inspect.getouterframes(run_frame)[2]
- caller, = res[:1]
- info = inspect.getframeinfo(caller)
- caller_module = caller.f_globals.get('__name__', '')
- return (
- caller_module == 'distutils.dist'
- and info.function == 'run_commands'
- )
-
- def do_egg_install(self):
-
- easy_install = self.distribution.get_command_class('easy_install')
-
- cmd = easy_install(
- self.distribution, args="x", root=self.root, record=self.record,
- )
- cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
- cmd.always_copy_from = '.' # make sure local-dir eggs get installed
-
- # pick up setup-dir .egg files only: no .egg-info
- cmd.package_index.scan(glob.glob('*.egg'))
-
- self.run_command('bdist_egg')
- args = [self.distribution.get_command_obj('bdist_egg').egg_output]
-
- if setuptools.bootstrap_install_from:
- # Bootstrap self-installation of setuptools
- args.insert(0, setuptools.bootstrap_install_from)
-
- cmd.args = args
- cmd.run()
- setuptools.bootstrap_install_from = None
-
-
-# XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = (
- [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
- install.new_commands
-)
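A sketch of driving this command the way a system packager would, respecting the constraint enforced in finalize_options() (--root implies single-version-externally-managed, and --record or --root is required); the paths are placeholders:

    # Assumes a setup.py in the current directory and a staging root that
    # the packager will archive afterwards.
    from distutils.core import run_setup

    run_setup('setup.py',
              ['install', '--root=/tmp/stage', '--record=installed.txt'])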
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py
deleted file mode 100644
index fd0f118..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_egg_info.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from distutils import log, dir_util
-import os
-
-from setuptools import Command
-from setuptools.archive_util import unpack_archive
-import pkg_resources
-
-
-class install_egg_info(Command):
- """Install an .egg-info directory for the package"""
-
- description = "Install an .egg-info directory for the package"
-
- user_options = [
- ('install-dir=', 'd', "directory to install to"),
- ]
-
- def initialize_options(self):
- self.install_dir = None
-
- def finalize_options(self):
- self.set_undefined_options('install_lib',
- ('install_dir', 'install_dir'))
- ei_cmd = self.get_finalized_command("egg_info")
- basename = pkg_resources.Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version
- ).egg_name() + '.egg-info'
- self.source = ei_cmd.egg_info
- self.target = os.path.join(self.install_dir, basename)
- self.outputs = [self.target]
-
- def run(self):
- self.run_command('egg_info')
- if os.path.isdir(self.target) and not os.path.islink(self.target):
- dir_util.remove_tree(self.target, dry_run=self.dry_run)
- elif os.path.exists(self.target):
- self.execute(os.unlink, (self.target,), "Removing " + self.target)
- if not self.dry_run:
- pkg_resources.ensure_directory(self.target)
- self.execute(
- self.copytree, (), "Copying %s to %s" % (self.source, self.target)
- )
- self.install_namespaces()
-
- def get_outputs(self):
- return self.outputs
-
- def copytree(self):
- # Copy the .egg-info tree to site-packages
- def skimmer(src, dst):
- # filter out source-control directories; note that 'src' is always
- # a '/'-separated path, regardless of platform. 'dst' is a
- # platform-specific path.
- for skip in '.svn/', 'CVS/':
- if src.startswith(skip) or '/' + skip in src:
- return None
- self.outputs.append(dst)
- log.debug("Copying %s to %s", src, dst)
- return dst
-
- unpack_archive(self.source, self.target, skimmer)
-
- def install_namespaces(self):
- nsp = self._get_all_ns_packages()
- if not nsp:
- return
- filename, ext = os.path.splitext(self.target)
- filename += '-nspkg.pth'
- self.outputs.append(filename)
- log.info("Installing %s", filename)
- lines = map(self._gen_nspkg_line, nsp)
-
- if self.dry_run:
- # always generate the lines, even in dry run
- list(lines)
- return
-
- with open(filename, 'wt') as f:
- f.writelines(lines)
-
- _nspkg_tmpl = (
- "import sys, types, os",
- "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
- "ie = os.path.exists(os.path.join(p,'__init__.py'))",
- "m = not ie and "
- "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
- "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
- "(p not in mp) and mp.append(p)",
- )
- "lines for the namespace installer"
-
- _nspkg_tmpl_multi = (
- 'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
- )
- "additional line(s) when a parent package is indicated"
-
- @classmethod
- def _gen_nspkg_line(cls, pkg):
- # ensure pkg is not a unicode string under Python 2.7
- pkg = str(pkg)
- pth = tuple(pkg.split('.'))
- tmpl_lines = cls._nspkg_tmpl
- parent, sep, child = pkg.rpartition('.')
- if parent:
- tmpl_lines += cls._nspkg_tmpl_multi
- return ';'.join(tmpl_lines) % locals() + '\n'
-
- def _get_all_ns_packages(self):
- """Return sorted list of all package namespaces"""
- nsp = set()
- for pkg in self.distribution.namespace_packages or []:
- pkg = pkg.split('.')
- while pkg:
- nsp.add('.'.join(pkg))
- pkg.pop()
- return sorted(nsp)
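The -nspkg.pth machinery above reduces to one semicolon-joined line per namespace package. A sketch of inspecting that line for a hypothetical namespace package 'zope.foo', assuming the setuptools vintage shown here:

    from setuptools.command.install_egg_info import install_egg_info

    # _gen_nspkg_line() joins _nspkg_tmpl (plus _nspkg_tmpl_multi, since the
    # package has a parent) with ';' and fills in pkg/pth/parent/child.
    print(install_egg_info._gen_nspkg_line('zope.foo'))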
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py
deleted file mode 100644
index 9b77222..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_lib.py
+++ /dev/null
@@ -1,118 +0,0 @@
-import os
-import imp
-from itertools import product, starmap
-import distutils.command.install_lib as orig
-
-class install_lib(orig.install_lib):
- """Don't add compiled flags to filenames of non-Python files"""
-
- def run(self):
- self.build()
- outfiles = self.install()
- if outfiles is not None:
- # always compile, in case we have any extension stubs to deal with
- self.byte_compile(outfiles)
-
- def get_exclusions(self):
- """
- Return a collections.Sized collections.Container of paths to be
- excluded for single_version_externally_managed installations.
- """
- all_packages = (
- pkg
- for ns_pkg in self._get_SVEM_NSPs()
- for pkg in self._all_packages(ns_pkg)
- )
-
- excl_specs = product(all_packages, self._gen_exclusion_paths())
- return set(starmap(self._exclude_pkg_path, excl_specs))
-
- def _exclude_pkg_path(self, pkg, exclusion_path):
- """
- Given a package name and exclusion path within that package,
- compute the full exclusion path.
- """
- parts = pkg.split('.') + [exclusion_path]
- return os.path.join(self.install_dir, *parts)
-
- @staticmethod
- def _all_packages(pkg_name):
- """
- >>> list(install_lib._all_packages('foo.bar.baz'))
- ['foo.bar.baz', 'foo.bar', 'foo']
- """
- while pkg_name:
- yield pkg_name
- pkg_name, sep, child = pkg_name.rpartition('.')
-
- def _get_SVEM_NSPs(self):
- """
- Get the list of namespace packages, but only for
- single_version_externally_managed installations; return [] otherwise.
- """
- # TODO: is it necessary to short-circuit here? i.e. what's the cost
- # if get_finalized_command is called even when namespace_packages is
- # False?
- if not self.distribution.namespace_packages:
- return []
-
- install_cmd = self.get_finalized_command('install')
- svem = install_cmd.single_version_externally_managed
-
- return self.distribution.namespace_packages if svem else []
-
- @staticmethod
- def _gen_exclusion_paths():
- """
- Generate file paths to be excluded for namespace packages (bytecode
- cache files).
- """
- # always exclude the package module itself
- yield '__init__.py'
-
- yield '__init__.pyc'
- yield '__init__.pyo'
-
- if not hasattr(imp, 'get_tag'):
- return
-
- base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
- yield base + '.pyc'
- yield base + '.pyo'
-
- def copy_tree(
- self, infile, outfile,
- preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
- ):
- assert preserve_mode and preserve_times and not preserve_symlinks
- exclude = self.get_exclusions()
-
- if not exclude:
- return orig.install_lib.copy_tree(self, infile, outfile)
-
- # Exclude namespace package __init__.py* files from the output
-
- from setuptools.archive_util import unpack_directory
- from distutils import log
-
- outfiles = []
-
- def pf(src, dst):
- if dst in exclude:
- log.warn("Skipping installation of %s (namespace package)",
- dst)
- return False
-
- log.info("copying %s -> %s", src, os.path.dirname(dst))
- outfiles.append(dst)
- return dst
-
- unpack_directory(infile, outfile, pf)
- return outfiles
-
- def get_outputs(self):
- outputs = orig.install_lib.get_outputs(self)
- exclude = self.get_exclusions()
- if exclude:
- return [f for f in outputs if f not in exclude]
- return outputs
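To make get_exclusions() concrete: for a hypothetical single-version-externally-managed install of a project declaring namespace package 'foo.bar', the product of packages and exclusion paths expands roughly as follows (the install directory is illustrative):

    import os
    from itertools import product, starmap

    install_dir = '/usr/lib/python2.7/site-packages'   # illustrative
    packages = ['foo.bar', 'foo']                      # _all_packages('foo.bar')
    names = ['__init__.py', '__init__.pyc', '__init__.pyo']

    def exclude(pkg, name):
        return os.path.join(install_dir, *(pkg.split('.') + [name]))

    exclusions = set(starmap(exclude, product(packages, names)))
    # e.g. .../foo/__init__.py, .../foo/bar/__init__.pyc, ...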
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py
deleted file mode 100644
index be66cb2..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/install_scripts.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from distutils import log
-import distutils.command.install_scripts as orig
-import os
-
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-
-
-class install_scripts(orig.install_scripts):
- """Do normal script install, plus any egg_info wrapper scripts"""
-
- def initialize_options(self):
- orig.install_scripts.initialize_options(self)
- self.no_ep = False
-
- def run(self):
- import setuptools.command.easy_install as ei
-
- self.run_command("egg_info")
- if self.distribution.scripts:
- orig.install_scripts.run(self) # run first to set up self.outfiles
- else:
- self.outfiles = []
- if self.no_ep:
- # don't install entry point scripts into .egg file!
- return
-
- ei_cmd = self.get_finalized_command("egg_info")
- dist = Distribution(
- ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
- ei_cmd.egg_name, ei_cmd.egg_version,
- )
- bs_cmd = self.get_finalized_command('build_scripts')
- exec_param = getattr(bs_cmd, 'executable', None)
- bw_cmd = self.get_finalized_command("bdist_wininst")
- is_wininst = getattr(bw_cmd, '_is_running', False)
- writer = ei.ScriptWriter
- if is_wininst:
- exec_param = "python.exe"
- writer = ei.WindowsScriptWriter
- # resolve the writer to the environment
- writer = writer.best()
- cmd = writer.command_spec_class.best().from_param(exec_param)
- for args in writer.get_args(dist, cmd.as_header()):
- self.write_script(*args)
-
- def write_script(self, script_name, contents, mode="t", *ignored):
- """Write an executable file to the scripts directory"""
- from setuptools.command.easy_install import chmod, current_umask
-
- log.info("Installing %s script to %s", script_name, self.install_dir)
- target = os.path.join(self.install_dir, script_name)
- self.outfiles.append(target)
-
- mask = current_umask()
- if not self.dry_run:
- ensure_directory(target)
- f = open(target, "w" + mode)
- f.write(contents)
- f.close()
- chmod(target, 0o777 - mask)
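The mode applied by write_script() is simply the complement of the current umask; a tiny sketch of the arithmetic, assuming a typical umask of 022:

    mask = 0o022            # what current_umask() would report
    mode = 0o777 - mask     # == 0o755, i.e. rwxr-xr-x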
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml
deleted file mode 100644
index 5972a96..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/launcher manifest.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
- <assemblyIdentity version="1.0.0.0"
- processorArchitecture="X86"
- name="%(name)s"
- type="win32"/>
- <!-- Identify the application security requirements. -->
- <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
- <security>
- <requestedPrivileges>
- <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
- </requestedPrivileges>
- </security>
- </trustInfo>
-</assembly>
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py
deleted file mode 100644
index 8d6336a..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/register.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import distutils.command.register as orig
-
-
-class register(orig.register):
- __doc__ = orig.register.__doc__
-
- def run(self):
- # Make sure that we are using valid current name/version info
- self.run_command('egg_info')
- orig.register.run(self)
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py
deleted file mode 100644
index 1b07362..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/rotate.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import os
-
-from setuptools import Command
-from setuptools.compat import basestring
-
-
-class rotate(Command):
- """Delete older distributions"""
-
- description = "delete older distributions, keeping N newest files"
- user_options = [
- ('match=', 'm', "patterns to match (required)"),
- ('dist-dir=', 'd', "directory where the distributions are"),
- ('keep=', 'k', "number of matching distributions to keep"),
- ]
-
- boolean_options = []
-
- def initialize_options(self):
- self.match = None
- self.dist_dir = None
- self.keep = None
-
- def finalize_options(self):
- if self.match is None:
- raise DistutilsOptionError(
- "Must specify one or more (comma-separated) match patterns "
- "(e.g. '.zip' or '.egg')"
- )
- if self.keep is None:
- raise DistutilsOptionError("Must specify number of files to keep")
- try:
- self.keep = int(self.keep)
- except ValueError:
- raise DistutilsOptionError("--keep must be an integer")
- if isinstance(self.match, basestring):
- self.match = [
- convert_path(p.strip()) for p in self.match.split(',')
- ]
- self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
-
- def run(self):
- self.run_command("egg_info")
- from glob import glob
-
- for pattern in self.match:
- pattern = self.distribution.get_name() + '*' + pattern
- files = glob(os.path.join(self.dist_dir, pattern))
- files = [(os.path.getmtime(f), f) for f in files]
- files.sort()
- files.reverse()
-
- log.info("%d file(s) matching %s", len(files), pattern)
- files = files[self.keep:]
- for (t, f) in files:
- log.info("Deleting %s", f)
- if not self.dry_run:
- os.unlink(f)
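run() above amounts to "sort the matching dist files by mtime, keep the newest N, delete the rest". A standalone sketch of that retention rule with hypothetical file names, roughly equivalent to 'setup.py rotate --match=.egg --keep=2 --dist-dir=dist':

    import glob
    import os

    files = sorted(glob.glob(os.path.join('dist', 'example-pkg*.egg')),
                   key=os.path.getmtime, reverse=True)
    for stale in files[2:]:          # everything past the newest 2
        os.unlink(stale)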
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py
deleted file mode 100644
index 611cec5..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/saveopts.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from setuptools.command.setopt import edit_config, option_base
-
-
-class saveopts(option_base):
- """Save command-line options to a file"""
-
- description = "save supplied options to setup.cfg or other config file"
-
- def run(self):
- dist = self.distribution
- settings = {}
-
- for cmd in dist.command_options:
-
- if cmd == 'saveopts':
- continue # don't save our own options!
-
- for opt, (src, val) in dist.get_option_dict(cmd).items():
- if src == "command line":
- settings.setdefault(cmd, {})[opt] = val
-
- edit_config(self.filename, settings, self.dry_run)
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py
deleted file mode 100644
index 851a177..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/sdist.py
+++ /dev/null
@@ -1,197 +0,0 @@
-from glob import glob
-from distutils import log
-import distutils.command.sdist as orig
-import os
-import sys
-
-from setuptools.compat import PY3
-from setuptools.utils import cs_path_exists
-
-import pkg_resources
-
-READMES = 'README', 'README.rst', 'README.txt'
-
-_default_revctrl = list
-
-def walk_revctrl(dirname=''):
- """Find all files under revision control"""
- for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
- for item in ep.load()(dirname):
- yield item
-
-
-class sdist(orig.sdist):
- """Smart sdist that finds anything supported by revision control"""
-
- user_options = [
- ('formats=', None,
- "formats for source distribution (comma-separated list)"),
- ('keep-temp', 'k',
- "keep the distribution tree around after creating " +
- "archive file(s)"),
- ('dist-dir=', 'd',
- "directory to put the source distribution archive(s) in "
- "[default: dist]"),
- ]
-
- negative_opt = {}
-
- def run(self):
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist = ei_cmd.filelist
- self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
- self.check_readme()
-
- # Run sub commands
- for cmd_name in self.get_sub_commands():
- self.run_command(cmd_name)
-
- # Call check_metadata only if no 'check' command
- # (distutils <= 2.6)
- import distutils.command
-
- if 'check' not in distutils.command.__all__:
- self.check_metadata()
-
- self.make_distribution()
-
- dist_files = getattr(self.distribution, 'dist_files', [])
- for file in self.archive_files:
- data = ('sdist', '', file)
- if data not in dist_files:
- dist_files.append(data)
-
- def __read_template_hack(self):
- # This grody hack closes the template file (MANIFEST.in) if an
- # exception occurs during read_template.
- # Doing so prevents an error when easy_install attempts to delete the
- # file.
- try:
- orig.sdist.read_template(self)
- except:
- _, _, tb = sys.exc_info()
- tb.tb_next.tb_frame.f_locals['template'].close()
- raise
-
- # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
- # has been fixed, so only override the method if we're using an earlier
- # Python.
- has_leaky_handle = (
- sys.version_info < (2, 7, 2)
- or (3, 0) <= sys.version_info < (3, 1, 4)
- or (3, 2) <= sys.version_info < (3, 2, 1)
- )
- if has_leaky_handle:
- read_template = __read_template_hack
-
- def add_defaults(self):
- standards = [READMES,
- self.distribution.script_name]
- for fn in standards:
- if isinstance(fn, tuple):
- alts = fn
- got_it = 0
- for fn in alts:
- if cs_path_exists(fn):
- got_it = 1
- self.filelist.append(fn)
- break
-
- if not got_it:
- self.warn("standard file not found: should have one of " +
- ', '.join(alts))
- else:
- if cs_path_exists(fn):
- self.filelist.append(fn)
- else:
- self.warn("standard file '%s' not found" % fn)
-
- optional = ['test/test*.py', 'setup.cfg']
- for pattern in optional:
- files = list(filter(cs_path_exists, glob(pattern)))
- if files:
- self.filelist.extend(files)
-
- # getting python files
- if self.distribution.has_pure_modules():
- build_py = self.get_finalized_command('build_py')
- self.filelist.extend(build_py.get_source_files())
- # This functionality is incompatible with include_package_data, and
- # will in fact create an infinite recursion if include_package_data
- # is True. Use of include_package_data will imply that
- # distutils-style automatic handling of package_data is disabled
- if not self.distribution.include_package_data:
- for _, src_dir, _, filenames in build_py.data_files:
- self.filelist.extend([os.path.join(src_dir, filename)
- for filename in filenames])
-
- if self.distribution.has_ext_modules():
- build_ext = self.get_finalized_command('build_ext')
- self.filelist.extend(build_ext.get_source_files())
-
- if self.distribution.has_c_libraries():
- build_clib = self.get_finalized_command('build_clib')
- self.filelist.extend(build_clib.get_source_files())
-
- if self.distribution.has_scripts():
- build_scripts = self.get_finalized_command('build_scripts')
- self.filelist.extend(build_scripts.get_source_files())
-
- def check_readme(self):
- for f in READMES:
- if os.path.exists(f):
- return
- else:
- self.warn(
- "standard file not found: should have one of " +
- ', '.join(READMES)
- )
-
- def make_release_tree(self, base_dir, files):
- orig.sdist.make_release_tree(self, base_dir, files)
-
- # Save any egg_info command line options used to create this sdist
- dest = os.path.join(base_dir, 'setup.cfg')
- if hasattr(os, 'link') and os.path.exists(dest):
- # unlink and re-copy, since it might be hard-linked, and
- # we don't want to change the source version
- os.unlink(dest)
- self.copy_file('setup.cfg', dest)
-
- self.get_finalized_command('egg_info').save_version_info(dest)
-
- def _manifest_is_not_generated(self):
- # check for special comment used in 2.7.1 and higher
- if not os.path.isfile(self.manifest):
- return False
-
- fp = open(self.manifest, 'rbU')
- try:
- first_line = fp.readline()
- finally:
- fp.close()
- return (first_line !=
- '# file GENERATED by distutils, do NOT edit\n'.encode())
-
- def read_manifest(self):
- """Read the manifest file (named by 'self.manifest') and use it to
- fill in 'self.filelist', the list of files to include in the source
- distribution.
- """
- log.info("reading manifest file '%s'", self.manifest)
- manifest = open(self.manifest, 'rbU')
- for line in manifest:
- # The manifest must contain UTF-8. See #303.
- if PY3:
- try:
- line = line.decode('UTF-8')
- except UnicodeDecodeError:
- log.warn("%r not UTF-8 decodable -- skipping" % line)
- continue
- # ignore comments and blank lines
- line = line.strip()
- if line.startswith('#') or not line:
- continue
- self.filelist.append(line)
- manifest.close()
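walk_revctrl() defers entirely to plugins registered under the 'setuptools.file_finders' entry-point group; each plugin is called with a directory name and yields paths under revision control. A hypothetical plugin sketch (registration and data are made up):

    # Registered by a plugin as, e.g.
    #   entry_points={'setuptools.file_finders': ['examplevcs = examplepkg.finder:find_files']}
    def find_files(dirname=''):
        """Yield paths tracked by the VCS below `dirname` (stub data)."""
        for path in ('README', 'setup.py', 'examplepkg/__init__.py'):
            yield path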
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py
deleted file mode 100644
index a04d603..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/setopt.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import distutils
-import os
-
-from setuptools import Command
-
-
-__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
-
-
-def config_file(kind="local"):
- """Get the filename of the distutils, local, global, or per-user config
-
- `kind` must be one of "local", "global", or "user"
- """
- if kind == 'local':
- return 'setup.cfg'
- if kind == 'global':
- return os.path.join(
- os.path.dirname(distutils.__file__), 'distutils.cfg'
- )
- if kind == 'user':
- dot = os.name == 'posix' and '.' or ''
- return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
- raise ValueError(
- "config_file() type must be 'local', 'global', or 'user'", kind
- )
-
-
-def edit_config(filename, settings, dry_run=False):
- """Edit a configuration file to include `settings`
-
- `settings` is a dictionary of dictionaries or ``None`` values, keyed by
- command/section name. A ``None`` value means to delete the entire section,
- while a dictionary lists settings to be changed or deleted in that section.
- A setting of ``None`` means to delete that setting.
- """
- from setuptools.compat import ConfigParser
-
- log.debug("Reading configuration from %s", filename)
- opts = ConfigParser.RawConfigParser()
- opts.read([filename])
- for section, options in settings.items():
- if options is None:
- log.info("Deleting section [%s] from %s", section, filename)
- opts.remove_section(section)
- else:
- if not opts.has_section(section):
- log.debug("Adding new section [%s] to %s", section, filename)
- opts.add_section(section)
- for option, value in options.items():
- if value is None:
- log.debug(
- "Deleting %s.%s from %s",
- section, option, filename
- )
- opts.remove_option(section, option)
- if not opts.options(section):
- log.info("Deleting empty [%s] section from %s",
- section, filename)
- opts.remove_section(section)
- else:
- log.debug(
- "Setting %s.%s to %r in %s",
- section, option, value, filename
- )
- opts.set(section, option, value)
-
- log.info("Writing %s", filename)
- if not dry_run:
- with open(filename, 'w') as f:
- opts.write(f)
-
-
-class option_base(Command):
- """Abstract base class for commands that mess with config files"""
-
- user_options = [
- ('global-config', 'g',
- "save options to the site-wide distutils.cfg file"),
- ('user-config', 'u',
- "save options to the current user's pydistutils.cfg file"),
- ('filename=', 'f',
- "configuration file to use (default=setup.cfg)"),
- ]
-
- boolean_options = [
- 'global-config', 'user-config',
- ]
-
- def initialize_options(self):
- self.global_config = None
- self.user_config = None
- self.filename = None
-
- def finalize_options(self):
- filenames = []
- if self.global_config:
- filenames.append(config_file('global'))
- if self.user_config:
- filenames.append(config_file('user'))
- if self.filename is not None:
- filenames.append(self.filename)
- if not filenames:
- filenames.append(config_file('local'))
- if len(filenames) > 1:
- raise DistutilsOptionError(
- "Must specify only one configuration file option",
- filenames
- )
- self.filename, = filenames
-
-
-class setopt(option_base):
- """Save command-line options to a file"""
-
- description = "set an option in setup.cfg or another config file"
-
- user_options = [
- ('command=', 'c', 'command to set an option for'),
- ('option=', 'o', 'option to set'),
- ('set-value=', 's', 'value of the option'),
- ('remove', 'r', 'remove (unset) the value'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.command = None
- self.option = None
- self.set_value = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.command is None or self.option is None:
- raise DistutilsOptionError("Must specify --command *and* --option")
- if self.set_value is None and not self.remove:
- raise DistutilsOptionError("Must specify --set-value or --remove")
-
- def run(self):
- edit_config(
- self.filename, {
- self.command: {self.option.replace('-', '_'): self.set_value}
- },
- self.dry_run
- )
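The setopt command is a thin wrapper over edit_config(); the call below sketches what 'setup.py setopt -c easy_install -o index-url -s <url>' amounts to (URL and file name are placeholders):

    from setuptools.command.setopt import edit_config

    # A value of None would delete the option (and the section once empty).
    edit_config('setup.cfg', {
        'easy_install': {'index_url': 'https://example.com/simple'},
    })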
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py
deleted file mode 100644
index 42689f7..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/test.py
+++ /dev/null
@@ -1,175 +0,0 @@
-from distutils.errors import DistutilsOptionError
-from unittest import TestLoader
-import unittest
-import sys
-
-from pkg_resources import (resource_listdir, resource_exists, normalize_path,
- working_set, _namespace_packages,
- add_activation_listener, require, EntryPoint)
-from setuptools import Command
-from setuptools.compat import PY3
-from setuptools.py31compat import unittest_main
-
-
-class ScanningLoader(TestLoader):
- def loadTestsFromModule(self, module):
- """Return a suite of all tests cases contained in the given module
-
- If the module is a package, load tests from all the modules in it.
- If the module has an ``additional_tests`` function, call it and add
- the return value to the tests.
- """
- tests = []
- tests.append(TestLoader.loadTestsFromModule(self, module))
-
- if hasattr(module, "additional_tests"):
- tests.append(module.additional_tests())
-
- if hasattr(module, '__path__'):
- for file in resource_listdir(module.__name__, ''):
- if file.endswith('.py') and file != '__init__.py':
- submodule = module.__name__ + '.' + file[:-3]
- else:
- if resource_exists(module.__name__, file + '/__init__.py'):
- submodule = module.__name__ + '.' + file
- else:
- continue
- tests.append(self.loadTestsFromName(submodule))
-
- if len(tests) != 1:
- return self.suiteClass(tests)
- else:
- return tests[0] # don't create a nested suite for only one return
-
-
-class test(Command):
- """Command to run unit tests after in-place build"""
-
- description = "run unit tests after in-place build"
-
- user_options = [
- ('test-module=', 'm', "Run 'test_suite' in specified module"),
- ('test-suite=', 's',
- "Test suite to run (e.g. 'some_module.test_suite')"),
- ('test-runner=', 'r', "Test runner to use"),
- ]
-
- def initialize_options(self):
- self.test_suite = None
- self.test_module = None
- self.test_loader = None
- self.test_runner = None
-
- def finalize_options(self):
-
- if self.test_suite is None:
- if self.test_module is None:
- self.test_suite = self.distribution.test_suite
- else:
- self.test_suite = self.test_module + ".test_suite"
- elif self.test_module:
- raise DistutilsOptionError(
- "You may specify a module or a suite, but not both"
- )
-
- self.test_args = [self.test_suite]
-
- if self.verbose:
- self.test_args.insert(0, '--verbose')
- if self.test_loader is None:
- self.test_loader = getattr(self.distribution, 'test_loader', None)
- if self.test_loader is None:
- self.test_loader = "setuptools.command.test:ScanningLoader"
- if self.test_runner is None:
- self.test_runner = getattr(self.distribution, 'test_runner', None)
-
- def with_project_on_sys_path(self, func):
- with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
-
- if with_2to3:
- # If we run 2to3 we cannot do this in place:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
- else:
- # Without 2to3, in-place works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- ei_cmd = self.get_finalized_command("egg_info")
-
- old_path = sys.path[:]
- old_modules = sys.modules.copy()
-
- try:
- sys.path.insert(0, normalize_path(ei_cmd.egg_base))
- working_set.__init__()
- add_activation_listener(lambda dist: dist.activate())
- require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
- func()
- finally:
- sys.path[:] = old_path
- sys.modules.clear()
- sys.modules.update(old_modules)
- working_set.__init__()
-
- def run(self):
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
- if self.test_suite:
- cmd = ' '.join(self.test_args)
- if self.dry_run:
- self.announce('skipping "unittest %s" (dry run)' % cmd)
- else:
- self.announce('running "unittest %s"' % cmd)
- self.with_project_on_sys_path(self.run_tests)
-
- def run_tests(self):
- # Purge modules under test from sys.modules. The test loader will
- # re-import them from the build location. Required when 2to3 is used
- # with namespace packages.
- if PY3 and getattr(self.distribution, 'use_2to3', False):
- module = self.test_args[-1].split('.')[0]
- if module in _namespace_packages:
- del_modules = []
- if module in sys.modules:
- del_modules.append(module)
- module += '.'
- for name in sys.modules:
- if name.startswith(module):
- del_modules.append(name)
- list(map(sys.modules.__delitem__, del_modules))
-
- unittest_main(
- None, None, [unittest.__file__] + self.test_args,
- testLoader=self._resolve_as_ep(self.test_loader),
- testRunner=self._resolve_as_ep(self.test_runner),
- )
-
- @staticmethod
- def _resolve_as_ep(val):
- """
- Load the indicated attribute value, called, as if it were
- specified as an entry point.
- """
- if val is None:
- return
- parsed = EntryPoint.parse("x=" + val)
- return parsed.resolve()()
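The command reads its defaults from the distribution, so it is normally configured through setup() keywords; a minimal, hypothetical configuration:

    from setuptools import setup

    setup(
        name='example-pkg',
        packages=['example_pkg'],
        test_suite='example_pkg.tests',   # consumed by finalize_options()
        tests_require=['mock'],           # fetched by fetch_build_eggs() in run()
    )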
diff --git a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py b/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py
deleted file mode 100644
index 001ee93..0000000
--- a/jython-tosca-parser/src/main/resources/Lib/site-packages/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# -*- coding: utf-8 -*-
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-PyPI's pythonhosted.org).
-"""
-
-from base64 import standard_b64encode
-from distutils import log
-from distutils.errors import DistutilsOptionError
-from distutils.command.upload import upload
-import os
-import socket
-import zipfile
-import tempfile
-import sys
-import shutil
-
-from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
-from pkg_resources import iter_entry_points
-
-
-errors = 'surrogateescape' if PY3 else 'strict'
-
-
-# This is not just a replacement for byte literals
-# but works as a general purpose encoder
-def b(s, encoding='utf-8'):
- if isinstance(s, unicode):
- return s.encode(encoding, errors)
- return s
-
-
-class upload_docs(upload):
- description = 'Upload documentation to PyPI'
-
- user_options = [
- ('repository=', 'r',
- "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
- ('show-response', None,
- 'display full response text from server'),
- ('upload-dir=', None, 'directory to upload'),
- ]
- boolean_options = upload.boolean_options
-
- def has_sphinx(self):
- if self.upload_dir is None:
- for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
- return True
-
- sub_commands = [('build_sphinx', has_sphinx)]
-
- def initialize_options(self):
- upload.initialize_options(self)
- self.upload_dir = None
- self.target_dir = None
-
- def finalize_options(self):
- upload.finalize_options(self)
- if self.upload_dir is None:
- if self.has_sphinx():
- build_sphinx = self.get_finalized_command('build_sphinx')
- self.target_dir = build_sphinx.builder_target_dir
- else:
- build = self.get_finalized_command('build')
- self.target_dir = os.path.join(build.build_base, 'docs')
- else:
- self.ensure_dirname('upload_dir')
- self.target_dir = self.upload_dir
- self.announce('Using upload directory %s' % self.target_dir)
-
- def create_zipfile(self, filename):
- zip_file = zipfile.ZipFile(filename, "w")
- try:
- self.mkpath(self.target_dir) # just in case
- for root, dirs, files in os.walk(self.target_dir):
- if root == self.target_dir and not files:
- raise DistutilsOptionError(
- "no files found in upload directory '%s'"
- % self.target_dir)
- for name in files:
- full = os.path.join(root, name)
- relative = root[len(self.target_dir):].lstrip(os.path.sep)
- dest = os.path.join(relative, name)
- zip_file.write(full, dest)
- finally:
- zip_file.close()
-
- def run(self):
- # Run sub commands
- for cmd_name in self.get_sub_commands():
- self.run_command(cmd_name)
-
- tmp_dir = tempfile.mkdtemp()
- name = self.distribution.metadata.get_name()
- zip_file = os.path.join(tmp_dir, "%s.zip" % name)
- try:
- self.create_zipfile(zip_file)
- self.upload_file(zip_file)
- finally:
- shutil.rmtree(tmp_dir)
-
- def upload_file(self, filename):
-        with open(filename, 'rb') as f:
-            content = f.read()
- meta = self.distribution.metadata
- data = {
- ':action': 'doc_upload',
- 'name': meta.get_name(),
- 'content': (os.path.basename(filename), content),
- }
- # set up the authentication
- credentials = b(self.username + ':' + self.password)
- credentials = standard_b64encode(credentials)
- if PY3:
- credentials = credentials.decode('ascii')
- auth = "Basic " + credentials
-
- # Build up the MIME payload for the POST data
- boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
- sep_boundary = b('\n--') + b(boundary)
- end_boundary = sep_boundary + b('--')
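-        # Each field is framed as "\n--<boundary>", a Content-Disposition
-        # header, a blank line and the raw value; the closing boundary below
-        # carries a trailing "--".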
- body = []
- for key, values in iteritems(data):
- title = '\nContent-Disposition: form-data; name="%s"' % key
- # handle multiple entries for the same name
- if not isinstance(values, list):
- values = [values]
- for value in values:
- if type(value) is tuple:
- title += '; filename="%s"' % value[0]
- value = value[1]
- else:
- value = b(value)
- body.append(sep_boundary)
- body.append(b(title))
- body.append(b("\n\n"))
- body.append(value)
- if value and value[-1:] == b('\r'):
- body.append(b('\n')) # write an extra newline (lurve Macs)
- body.append(end_boundary)
- body.append(b("\n"))
- body = b('').join(body)
-
- self.announce("Submitting documentation to %s" % (self.repository),
- log.INFO)
-
- # build the Request
-        # We can't use urllib2 since we need to send the Basic
-        # auth header along with the very first request
- schema, netloc, url, params, query, fragments = \
- urlparse(self.repository)
- assert not params and not query and not fragments
- if schema == 'http':
- conn = httplib.HTTPConnection(netloc)
- elif schema == 'https':
- conn = httplib.HTTPSConnection(netloc)
- else:
- raise AssertionError("unsupported schema " + schema)
-
- data = ''
- try:
- conn.connect()
- conn.putrequest("POST", url)
- content_type = 'multipart/form-data; boundary=%s' % boundary
- conn.putheader('Content-type', content_type)
- conn.putheader('Content-length', str(len(body)))
- conn.putheader('Authorization', auth)
- conn.endheaders()
- conn.send(body)
- except socket.error as e:
- self.announce(str(e), log.ERROR)
- return
-
- r = conn.getresponse()
- if r.status == 200:
- self.announce('Server response (%s): %s' % (r.status, r.reason),
- log.INFO)
- elif r.status == 301:
- location = r.getheader('Location')
- if location is None:
- location = 'https://pythonhosted.org/%s/' % meta.get_name()
- self.announce('Upload successful. Visit %s' % location,
- log.INFO)
- else:
- self.announce('Upload failed (%s): %s' % (r.status, r.reason),
- log.ERROR)
- if self.show_response:
- print('-' * 75, r.read(), '-' * 75)
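As a closing aside, the hand-rolled multipart/form-data framing in upload_file can be reproduced in a few lines. The sketch below is a simplified, CRLF-joined variant with an invented boundary and illustrative field values, not the exact byte layout the deleted code produced:

    # Simplified multipart/form-data framing, analogous to upload_file.
    # The boundary and field values here are illustrative only.
    boundary = 'XxXxXxXxXx'
    fields = {':action': 'doc_upload', 'name': 'example-project'}

    parts = []
    for key, value in fields.items():
        parts.append('--' + boundary)
        parts.append('Content-Disposition: form-data; name="%s"' % key)
        parts.append('')
        parts.append(value)
    parts.append('--' + boundary + '--')
    body = '\r\n'.join(parts).encode('utf-8')
    # POST 'body' with: Content-Type: multipart/form-data; boundary=XxXxXxXxXx

In practice the command would have been driven as "setup.py upload_docs --upload-dir=<built-docs-dir>", with the directory defaulting to the Sphinx build output when available.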