Updated script that can be controlled by a Node.js web app

Author: mac OS
Date: 2024-11-25 12:24:18 +07:00
parent c440eda1f4
commit 8b0ab2bd3a
8662 changed files with 1803808 additions and 34 deletions


@@ -0,0 +1,111 @@
"""Extensions to the 'distutils' for large or complex distributions"""
import os
import sys
import distutils.core
import distutils.filelist
from distutils.core import Command as _Command
from distutils.util import convert_path
from fnmatch import fnmatchcase
import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution, Feature, _get_unpatched
from setuptools.depends import Require
__all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
'find_packages'
]
__version__ = setuptools.version.__version__
bootstrap_install_from = None
# If we run 2to3 on .py files, should we also convert docstrings?
# Default: yes; assume that we can detect doctests reliably
run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']
def find_packages(where='.', exclude=(), include=()):
"""Return a list all Python packages found within directory 'where'
'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
will be converted to the appropriate local path syntax. 'exclude' is a
sequence of package names to exclude; '*' can be used as a wildcard in the
names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
'foo' itself).
'include' is a sequence of package names to include. If it's specified,
only the named packages will be included. If it's not specified, all found
packages will be included. 'include' can contain shell style wildcard
patterns just like 'exclude'.
The list of included packages is built up first and then any explicitly
excluded packages are removed from it.
"""
out = []
stack=[(convert_path(where), '')]
include = list(include)
exclude = list(exclude) + ['ez_setup', '*__pycache__']
while stack:
where,prefix = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where,name)
looks_like_package = (
'.' not in name
and os.path.isdir(fn)
and os.path.isfile(os.path.join(fn, '__init__.py'))
)
if looks_like_package:
pkg_name = prefix + name
if (not include or
any(fnmatchcase(pkg_name, pat) for pat in include)):
out.append(pkg_name)
stack.append((fn, pkg_name + '.'))
for pat in exclude:
out = [item for item in out if not fnmatchcase(item,pat)]
return out
setup = distutils.core.setup
_Command = _get_unpatched(_Command)
class Command(_Command):
__doc__ = _Command.__doc__
command_consumes_arguments = False
def __init__(self, dist, **kw):
# Add support for keyword arguments
_Command.__init__(self,dist)
for k,v in kw.items():
setattr(self,k,v)
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
for k,v in kw.items():
setattr(cmd,k,v) # update command with keywords
return cmd
distutils.core.Command = Command # we can't patch distutils.cmd, alas
def findall(dir = os.curdir):
"""Find all files under 'dir' and return the list of full filenames
(relative to 'dir').
"""
all_files = []
for base, dirs, files in os.walk(dir):
if base==os.curdir or base.startswith(os.curdir+os.sep):
base = base[2:]
if base:
files = [os.path.join(base, f) for f in files]
all_files.extend(filter(os.path.isfile, files))
return all_files
distutils.filelist.findall = findall # fix findall bug in distutils.
# sys.dont_write_bytecode was introduced in Python 2.6.
_dont_write_bytecode = getattr(sys, 'dont_write_bytecode',
bool(os.environ.get("PYTHONDONTWRITEBYTECODE")))
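
A brief usage sketch of find_packages() as defined above, not part of the commit; the throwaway directory names ('foo', 'foo.bar', 'foo.tests', 'ez_setup') are illustrative assumptions.

import os
import tempfile

def _demo_find_packages():
    root = tempfile.mkdtemp()
    for pkg in ("foo", "foo/bar", "foo/tests", "ez_setup"):
        d = os.path.join(root, *pkg.split("/"))
        os.makedirs(d)
        open(os.path.join(d, "__init__.py"), "w").close()
    # 'exclude' patterns use fnmatchcase, so 'foo.tests*' drops that
    # subpackage while keeping 'foo' and 'foo.bar'; 'ez_setup' and
    # '*__pycache__' are always excluded by find_packages itself.
    return find_packages(root, exclude=("foo.tests*",))

# expected result (breadth-first scan order): ['foo', 'foo.bar']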


@@ -0,0 +1,210 @@
"""Utilities for extracting common archive formats"""
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile, tarfile, os, shutil, posixpath
from pkg_resources import ensure_directory
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None
):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_directory``, ``unpack_zipfile`` and ``unpack_tarfile`` will
be tried, in that order.
"""
for driver in drivers or extraction_drivers:
try:
driver(filename, extract_dir, progress_filter)
except UnrecognizedFormat:
continue
else:
return
else:
raise UnrecognizedFormat(
"Not a recognized archive type: %s" % filename
)
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % (filename,))
paths = {filename:('',extract_dir)}
for base, dirs, files in os.walk(filename):
src,dst = paths[base]
for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
for f in files:
name = src+f
target = os.path.join(dst,f)
target = progress_filter(src+f, target)
if not target:
continue # skip non-files
ensure_directory(target)
f = os.path.join(base,f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
z = zipfile.ZipFile(filename)
try:
for info in z.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name.split('/'):
continue
target = os.path.join(extract_dir, *name.split('/'))
target = progress_filter(name, target)
if not target:
continue
if name.endswith('/'):
# directory
ensure_directory(target)
else:
# file
ensure_directory(target)
data = z.read(info.filename)
f = open(target,'wb')
try:
f.write(data)
finally:
f.close()
del data
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
finally:
z.close()
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
try:
tarobj.chown = lambda *args: None # don't do any chowning!
for member in tarobj:
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/'))
# resolve any links and extract the link targets as normal files
while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath)
if member is not None and (member.isfile() or member.isdir()):
final_dst = progress_filter(name, prelim_dst)
if final_dst:
if final_dst.endswith(os.sep):
final_dst = final_dst[:-1]
try:
tarobj._extract_member(member, final_dst) # XXX Ugh
except tarfile.ExtractError:
pass # chown/chmod/mkfifo/mknode/makedev failed
return True
finally:
tarobj.close()
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
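
A minimal sketch (not part of the commit) of a custom progress_filter for unpack_archive(); the archive path, target directory and the 'docs/' prefix are made-up examples.

def skip_docs_filter(src, dst):
    # 'src' is the '/'-separated path inside the archive, 'dst' the
    # filesystem path it would be extracted to.
    if src.startswith('docs/'):
        return None          # returning None skips this entry
    print('extracting %s -> %s' % (src, dst))
    return dst               # returning a path extracts to that path

# unpack_archive('dist/example-1.0.zip', 'build/unpacked', skip_docs_filter)
# would try unpack_directory, unpack_zipfile and unpack_tarfile in turn and
# raise UnrecognizedFormat if none of them accepts the input.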

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,17 @@
__all__ = [
'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
'register', 'bdist_wininst', 'upload_docs',
]
from setuptools.command import install_scripts
import sys
from distutils.command.bdist import bdist
if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
bdist.format_commands.append('egg')
del bdist, sys
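
An illustration (not part of the commit) of what the registration above achieves, assuming the package above is importable and this module has been imported; the asserts simply restate the two entries the code inserts.

from distutils.command.bdist import bdist

assert 'egg' in bdist.format_commands
assert bdist.format_command['egg'] == ('bdist_egg', "Python .egg file")
# which is what makes `python setup.py bdist --formats=egg` dispatch to
# the bdist_egg command.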


@@ -0,0 +1,76 @@
from distutils.errors import DistutilsOptionError
from setuptools.command.setopt import edit_config, option_base, config_file
def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if arg.split() != [arg]:
return repr(arg)
return arg
class alias(option_base):
"""Define a shortcut that invokes one or more commands"""
description = "define a shortcut to invoke one or more commands"
command_consumes_arguments = True
user_options = [
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.args = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.remove and len(self.args) != 1:
raise DistutilsOptionError(
"Must specify exactly one argument (the alias name) when "
"using --remove"
)
def run(self):
aliases = self.distribution.get_option_dict('aliases')
if not self.args:
print("Command Aliases")
print("---------------")
for alias in aliases:
print("setup.py alias", format_alias(alias, aliases))
return
elif len(self.args)==1:
alias, = self.args
if self.remove:
command = None
elif alias in aliases:
print("setup.py alias", format_alias(alias, aliases))
return
else:
print("No alias definition found for %r" % alias)
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
def format_alias(name, aliases):
source, command = aliases[name]
if source == config_file('global'):
source = '--global-config '
elif source == config_file('user'):
source = '--user-config '
elif source == config_file('local'):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
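
A small sketch (not part of the commit) showing how shquote() keeps alias arguments round-trippable through shlex.split(); the argument strings are made up.

import shlex

args = ['--quiet', 'release notes.txt', "it's"]
quoted = ' '.join(shquote(a) for a in args)
# plain words pass through untouched, while anything containing whitespace
# or quote/backslash/# characters is repr()-quoted so shlex can undo it:
assert shlex.split(quoted) == args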


@@ -0,0 +1,529 @@
"""setuptools.command.bdist_egg
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
import sys, os, marshal
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath
try:
# Python 2.7 or >=3.2
from sysconfig import get_path, get_python_version
def _get_purelib():
return get_path("purelib")
except ImportError:
from distutils.sysconfig import get_python_lib, get_python_version
def _get_purelib():
return get_python_lib(False)
from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.compat import basestring, next
from setuptools.extension import Library
def strip_module(filename):
if '.' in filename:
filename = os.path.splitext(filename)[0]
if filename.endswith('module'):
filename = filename[:-6]
return filename
def write_stub(resource, pyfile):
f = open(pyfile,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __loader__, __file__",
" import sys, pkg_resources, imp",
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% resource,
" __loader__ = None; del __bootstrap__, __loader__",
" imp.load_dynamic(__name__,__file__)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
class bdist_egg(Command):
description = "create an \"egg\" distribution"
user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
]
boolean_options = [
'keep-temp', 'skip-build', 'exclude-source-files'
]
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.egg_output = None
self.exclude_source_files = None
def finalize_options(self):
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
self.egg_info = ei_cmd.egg_info
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'egg')
if self.plat_name is None:
self.plat_name = get_build_platform()
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
if self.egg_output is None:
# Compute filename of the output egg
basename = Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version,
get_python_version(),
self.distribution.has_ext_modules() and self.plat_name
).egg_name()
self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir
site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
old, self.distribution.data_files = self.distribution.data_files,[]
for item in old:
if isinstance(item,tuple) and len(item)==2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)
try:
log.info("installing package data to %s" % self.bdist_dir)
self.call_command('install_data', force=0, root=None)
finally:
self.distribution.data_files = old
def get_outputs(self):
return [self.egg_output]
def call_command(self,cmdname,**kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)
return cmd
def run(self):
# Generate metadata first
self.run_command("egg_info")
# We run install_lib before install_data, because some data hacks
# pull their data path from the install_lib command.
log.info("installing library code to %s" % self.bdist_dir)
instcmd = self.get_finalized_command('install')
old_root = instcmd.root; instcmd.root = None
if self.distribution.has_c_libraries() and not self.skip_build:
self.run_command('build_clib')
cmd = self.call_command('install_lib', warn_dir=0)
instcmd.root = old_root
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
if to_compile:
cmd.byte_compile(to_compile)
if self.distribution.data_files:
self.do_install_data()
# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")
if all_outputs:
log.info("writing %s" % native_libs)
if not self.dry_run:
ensure_directory(native_libs)
libs_file = open(native_libs, 'wt')
libs_file.write('\n'.join(all_outputs))
libs_file.write('\n')
libs_file.close()
elif os.path.isfile(native_libs):
log.info("removing %s" % native_libs)
if not self.dry_run:
os.unlink(native_libs)
write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
)
if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
if self.exclude_source_files:
self.zap_pyfiles()
# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
log.debug("Deleting %s", path)
os.unlink(path)
def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")
return analyze_egg(self.bdist_dir, self.stubs)
def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.
if not ep.attrs or ep.extras:
raise DistutilsSetupError(
"eggsecutable entry point (%r) cannot have 'extras' "
"or refer to a module" % (ep,)
)
pyver = sys.version[:3]
pkg = ep.module_name
full = '.'.join(ep.attrs)
base = ep.attrs[0]
basename = os.path.basename(self.egg_output)
header = (
"#!/bin/sh\n"
'if [ `basename $0` = "%(basename)s" ]\n'
'then exec python%(pyver)s -c "'
"import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
"from %(pkg)s import %(base)s; sys.exit(%(full)s())"
'" "$@"\n'
'else\n'
' echo $0 is not the correct name for this egg file.\n'
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'
) % locals()
if not self.dry_run:
mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
f = open(self.egg_output, 'w')
f.write(header)
f.close()
return 'a'
def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir"
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info,'')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])
ensure_directory(target)
self.copy_file(path, target)
def get_ext_outputs(self):
"""Get a list of relative paths to C extensions in the output distro"""
all_outputs = []
ext_outputs = []
paths = {self.bdist_dir:''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
ext_outputs.append(filename)
return all_outputs, ext_outputs
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
for bdf in walker:
yield bdf
def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
return flag
if not can_scan(): return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:
if name.endswith('.py') or name.endswith('.pyw'):
continue
elif name.endswith('.pyc') or name.endswith('.pyo'):
# always scan, even if we already know we're not safe
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe
def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
f=open(fn,'wt'); f.write('\n'); f.close()
safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""
filename = os.path.join(base,name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
if sys.version_info < (3, 3):
skip = 8 # skip magic & date
else:
skip = 12 # skip magic & date & file size
f = open(filename,'rb'); f.read(skip)
code = marshal.load(f); f.close()
safe = True
symbols = dict.fromkeys(iter_symbols(code))
for bad in ['__file__', '__path__']:
if bad in symbols:
log.warn("%s: module references %s", module, bad)
safe = False
if 'inspect' in symbols:
for bad in [
'getsource', 'getabsfile', 'getsourcefile', 'getfile',
'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
'getinnerframes', 'getouterframes', 'stack', 'trace'
]:
if bad in symbols:
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe
def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for const in code.co_consts:
if isinstance(const,basestring):
yield const
elif isinstance(const,CodeType):
for name in iter_symbols(const):
yield name
def can_scan():
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.
return True
log.warn("Unable to analyze compiled code on this platform.")
log.warn("Please ask the author to include a 'zip_safe'"
" setting (either True or False) in the package's setup.py")
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'
):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
and found on the default search path). If neither tool is available,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
def visit(z, dirname, names):
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)
if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
z = zipfile.ZipFile(zip_filename, mode, compression=compression)
for dirname, dirs, files in os.walk(base_dir):
visit(z, dirname, files)
z.close()
else:
for dirname, dirs, files in os.walk(base_dir):
visit(None, dirname, files)
return zip_filename
#
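
An illustration (not part of the commit) of how stub-loader filenames are derived for C extensions packed into the egg; '_speedupsmodule.so' and 'helpers.so' are invented names.

assert strip_module('_speedupsmodule.so') == '_speedups'
assert strip_module('helpers.so') == 'helpers'
# run() then writes '<bdist_dir>/_speedups.py' via write_stub(); its
# __bootstrap__() locates the real shared object with pkg_resources and
# loads it with imp.load_dynamic when the stub is imported.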


@@ -0,0 +1,82 @@
# This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs, and
# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
import sys, os
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
if sys.version<"2.5":
# Track for uploading any .rpm file(s) moved to self.dist_dir
def move_file(self, src, dst, level=1):
_bdist_rpm.move_file(self, src, dst, level)
if dst==self.dist_dir and src.endswith('.rpm'):
getattr(self.distribution,'dist_files',[]).append(
('bdist_rpm',
src.endswith('.src.rpm') and 'any' or sys.version[:3],
os.path.join(dst, os.path.basename(src)))
)
def run(self):
self.run_command('egg_info') # ensure distro name is up-to-date
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version '+version
line24 = '%define version '+rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23,line24)
for line in spec
]
spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
return spec
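
A short note (not part of the commit) on the spec rewriting above, using the invented version string '1.0-beta1'.

version = '1.0-beta1'
rpmversion = version.replace('-', '_')      # '1.0_beta1' -- RPM-safe
# The rewritten spec then carries both forms, roughly:
#   %define version 1.0_beta1
#   %define unmangled_version 1.0-beta1
# and Source0/%setup reference %{unmangled_version}, so the tarball name
# produced by sdist still matches.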


@@ -0,0 +1,20 @@
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False


@@ -0,0 +1,289 @@
from distutils.command.build_ext import build_ext as _du_build_ext
try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
import os
import sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
try:
# Python 2.7 or >=3.2
from sysconfig import _CONFIG_VARS
except ImportError:
from distutils.sysconfig import get_config_var
get_config_var("LDSHARED") # make sure _config_vars is initialized
del get_config_var
from distutils.sysconfig import _config_vars as _CONFIG_VARS
from distutils import log
from distutils.errors import DistutilsError
have_rtld = False
use_stubs = False
libtype = 'shared'
if sys.platform == "darwin":
use_stubs = True
elif os.name != 'nt':
try:
from dl import RTLD_NOW
have_rtld = True
use_stubs = True
except ImportError:
pass
def if_dl(s):
if have_rtld:
return s
return ''
class build_ext(_build_ext):
def run(self):
"""Build extensions in build directory, then copy if --inplace"""
old_inplace, self.inplace = self.inplace, 0
_build_ext.run(self)
self.inplace = old_inplace
if old_inplace:
self.copy_extensions_to_source()
def copy_extensions_to_source(self):
build_py = self.get_finalized_command('build_py')
for ext in self.extensions:
fullname = self.get_ext_fullname(ext.name)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are
# used.
copy_file(
src_filename, dest_filename, verbose=self.verbose,
dry_run=self.dry_run
)
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)
if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing
sources = _build_ext.swig_sources(self, sources) or sources
# Then do any actual SWIG stuff on the remainder
return _du_build_ext.swig_sources(self, sources, *otherargs)
def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
if fullname in self.ext_map:
ext = self.ext_map[fullname]
if isinstance(ext,Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
return filename
def initialize_options(self):
_build_ext.initialize_options(self)
self.shlib_compiler = None
self.shlibs = []
self.ext_map = {}
def finalize_options(self):
_build_ext.finalize_options(self)
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext, Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:
ext._full_name = self.get_ext_fullname(ext.name)
for ext in self.extensions:
fullname = ext._full_name
self.ext_map[fullname] = ext
# distutils 3.1 will also ask for module names
# XXX what to do with conflicts?
self.ext_map[fullname.split('.')[-1]] = ext
ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
ext.runtime_library_dirs.append(os.curdir)
def setup_shlib_compiler(self):
compiler = self.shlib_compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force
)
if sys.platform == "darwin":
tmp = _CONFIG_VARS.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_CONFIG_VARS['CCSHARED'] = " -dynamiclib"
_CONFIG_VARS['SO'] = ".dylib"
customize_compiler(compiler)
finally:
_CONFIG_VARS.clear()
_CONFIG_VARS.update(tmp)
else:
customize_compiler(compiler)
if self.include_dirs is not None:
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
compiler.undefine_macro(macro)
if self.libraries is not None:
compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
compiler.set_link_objects(self.link_objects)
# hack so distutils' build_extension() builds a library instead
compiler.link_shared_object = link_shared_object.__get__(compiler)
def get_export_symbols(self, ext):
if isinstance(ext,Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext
)
finally:
self.compiler = _compiler
def links_to_dynamic(self, ext):
"""Return true if 'ext' links to a dynamic lib in the same package"""
# XXX this should check to ensure the lib is actually being built
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
return False
def get_outputs(self):
outputs = _build_ext.get_outputs(self)
optimize = self.get_finalized_command('build_py').optimize
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
if optimize:
outputs.append(base+'.pyo')
return outputs
def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f.write(
'\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp"+if_dl(", dl"),
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
])
)
f.close()
if compile:
from distutils.util import byte_compile
byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize
if optimize > 0:
byte_compile([stub_file], optimize=optimize,
force=True, dry_run=self.dry_run)
if os.path.exists(stub_file) and not self.dry_run:
os.unlink(stub_file)
if use_stubs or os.name=='nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,
export_symbols, debug, extra_preargs, extra_postargs,
build_temp, target_lang
)
else:
# Build static libraries everywhere else
libtype = 'static'
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
# a different prefix
basename = basename[3:]
self.create_static_lib(
objects, basename, output_dir, debug, target_lang
)
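
A toy illustration (not part of the commit) of the descriptor trick used in setup_shlib_compiler(), where a plain function is bound onto one compiler instance with __get__; the class and function names below are invented.

class _ToyCompiler(object):
    def link(self, kind, objects):
        return 'linked %s from %r' % (kind, objects)

def _fake_link_shared_object(self, objects):
    # delegates to the instance's own link(), just like the real hook
    return self.link('shared', objects)

c = _ToyCompiler()
c.link_shared_object = _fake_link_shared_object.__get__(c)  # bind to c only
assert c.link_shared_object(['a.o']) == "linked shared from ['a.o']"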


@@ -0,0 +1,221 @@
import os
import sys
import fnmatch
import textwrap
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob
try:
from setuptools.lib2to3_ex import Mixin2to3
except ImportError:
class Mixin2to3:
def run_2to3(self, files, doctests=True):
"do nothing"
class build_py(_build_py, Mixin2to3):
"""Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'.
See 'setuptools.dist.Distribution' for more details.
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""
def finalize_options(self):
_build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
self.__updated_files = []
self.__doctests_2to3 = []
def run(self):
"""Build modules, packages, and copy data files to build directory"""
if not self.py_modules and not self.packages:
return
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
self.run_2to3(self.__updated_files, False)
self.run_2to3(self.__updated_files, True)
self.run_2to3(self.__doctests_2to3, True)
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
def __getattr__(self, attr):
if attr=='data_files': # lazily compute data files
self.data_files = files = self._get_data_files()
return files
return _build_py.__getattr__(self,attr)
def build_module(self, module, module_file, package):
outfile, copied = _build_py.build_module(self, module, module_file, package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied
def _get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
self.analyze_manifest()
data = []
for package in self.packages or ():
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append((package, src_dir, build_dir, filenames))
return data
def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = self.manifest_files.get(package, [])[:]
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
return self.exclude_data_files(package, src_dir, files)
def build_package_data(self):
"""Copy data files into build directory"""
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if copied and srcfile in self.distribution.convert_2to3_doctests:
self.__doctests_2to3.append(outf)
def analyze_manifest(self):
self.manifest_files = mf = {}
if not self.distribution.include_package_data:
return
src_dirs = {}
for package in self.packages or ():
# Locate package source directory
src_dirs[assert_relative(self.get_package_dir(package))] = package
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
d,f = os.path.split(assert_relative(path))
prev = None
oldf = f
while d and d!=prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
if path.endswith('.py') and f==oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d],[]).append(path)
def get_data_files(self): pass # kludge 2.4 for lazy computation
if sys.version<"2.4": # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory
This includes both '.py' files and data files, as well as '.pyc'
and '.pyo' files if 'include_bytecode' is true. (This method is
needed for the 'install_lib' command to do its job properly, and to
generate a correct installation manifest.)
"""
return _build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for filename in filenames
]
def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
try:
return self.packages_checked[package]
except KeyError:
pass
init_py = _build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
break
else:
return init_py
f = open(init_py,'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils.errors import DistutilsError
raise DistutilsError(
"Namespace package problem: %s is a namespace package, but its\n"
"__init__.py does not call declare_namespace()! Please fix it.\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n" % (package,)
)
f.close()
return init_py
def initialize_options(self):
self.packages_checked={}
_build_py.initialize_options(self)
def get_package_dir(self, package):
res = _build_py.get_package_dir(self, package)
if self.distribution.src_root is not None:
return os.path.join(self.distribution.src_root, res)
return res
def exclude_data_files(self, package, src_dir, files):
"""Filter filenames for package's data files in 'src_dir'"""
globs = (self.exclude_package_data.get('', [])
+ self.exclude_package_data.get(package, []))
bad = []
for pattern in globs:
bad.extend(
fnmatch.filter(
files, os.path.join(src_dir, convert_path(pattern))
)
)
bad = dict.fromkeys(bad)
seen = {}
return [
f for f in files if f not in bad
and f not in seen and seen.setdefault(f,1) # ditch dupes
]
def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError
msg = textwrap.dedent("""
Error: setup script specifies an absolute path:
%s
setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
""").lstrip() % path
raise DistutilsSetupError(msg)
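
A small sketch (not part of the commit) of assert_relative()'s contract; the paths are placeholders.

from distutils.errors import DistutilsSetupError

assert assert_relative('pkg/data/schema.json') == 'pkg/data/schema.json'
try:
    assert_relative('/etc/passwd')
except DistutilsSetupError:
    pass  # absolute paths are rejected with the message built above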


@@ -0,0 +1,167 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
class develop(easy_install):
"""Set up package for development"""
description = "install package in 'development mode'"
user_options = easy_install.user_options + [
("uninstall", "u", "Uninstall this source package"),
("egg-path=", None, "Set the path to be used in the .egg-link file"),
]
boolean_options = easy_install.boolean_options + ['uninstall']
command_consumes_arguments = False # override base
def run(self):
if self.uninstall:
self.multi_version = True
self.uninstall_link()
else:
self.install_for_development()
self.warn_deprecated_options()
def initialize_options(self):
self.uninstall = None
self.egg_path = None
easy_install.initialize_options(self)
self.setup_path = None
self.always_copy_from = '.' # always copy eggs installed in curdir
def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
raise DistutilsError(
"Please rename %r to %r before using 'develop'"
% (ei.egg_info, ei.broken_egg_info)
)
self.args = [ei.egg_name]
easy_install.finalize_options(self)
self.expand_basedirs()
self.expand_dirs()
# pick up setup-dir .egg files only: no .egg-info
self.package_index.scan(glob.glob('*.egg'))
self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
self.egg_base = ei.egg_base
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)
target = normalize_path(self.egg_base)
if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
" directory to "+target
)
# Make a distribution for the package's source
self.dist = Distribution(
target,
PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name = ei.egg_name
)
p = self.egg_base.replace(os.sep,'/')
if p!= os.curdir:
p = '../' * (p.count('/')+1)
self.setup_path = p
p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
if p != normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
" installation directory", p, normalize_path(os.curdir))
def install_for_development(self):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date
self.reinitialize_command('build_py', inplace=0)
self.run_command('build_py')
bpy_cmd = self.get_finalized_command("build_py")
build_path = normalize_path(bpy_cmd.build_lib)
# Build extensions
self.reinitialize_command('egg_info', egg_base=build_path)
self.run_command('egg_info')
self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')
# Fixup egg-link and easy-install.pth
ei_cmd = self.get_finalized_command("egg_info")
self.egg_path = build_path
self.dist.location = build_path
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)
setuptools.bootstrap_install_from = None
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
f = open(self.egg_link,"w")
f.write(self.egg_path + "\n" + self.setup_path)
f.close()
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist, not self.no_deps)
def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:
os.unlink(self.egg_link)
if not self.dry_run:
self.update_pth(self.dist) # remove any .pth link to us
if self.distribution.scripts:
# XXX should also check for entry point scripts!
log.warn("Note: you must uninstall or replace scripts manually!")
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self,dist)
# create wrapper scripts in the script dir, pointing to dist.scripts
# new-style...
self.install_wrapper_scripts(dist)
# ...and old-style
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
f = open(script_path,'rU')
script_text = f.read()
f.close()
self.install_script(dist, script_name, script_text, script_path)
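
For reference (not part of the commit), the .egg-link file written by install_for_development() is just two lines; the project path below is invented.

egg_path = '/home/user/src/mypkg'      # where the project's egg-info lives
setup_path = '.'                       # relative path back to the setup dir
egg_link_contents = egg_path + "\n" + setup_path
# uninstall_link() accepts either [egg_path] or [egg_path, setup_path]
# when it reads the file back, so older one-line links keep working.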

File diff suppressed because it is too large


@@ -0,0 +1,392 @@
"""setuptools.command.egg_info
Create a distribution's .egg-info directory and contents"""
import os
import re
import sys
from setuptools import Command
import distutils.errors
from distutils import log
from setuptools.command.sdist import sdist
from setuptools.compat import basestring
from setuptools import svn_utils
from distutils.util import convert_path
from distutils.filelist import FileList as _FileList
from pkg_resources import (parse_requirements, safe_name, parse_version,
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
from setuptools.command.sdist import walk_revctrl
class egg_info(Command):
description = "create a distribution's .egg-info directory"
user_options = [
('egg-base=', 'e', "directory containing .egg-info directories"
" (default: top of the source tree)"),
('tag-svn-revision', 'r',
"Add subversion revision ID to version number"),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
('no-svn-revision', 'R',
"Don't add subversion revision ID [default]"),
('no-date', 'D', "Don't include date stamp [default]"),
]
boolean_options = ['tag-date', 'tag-svn-revision']
negative_opt = {'no-svn-revision': 'tag-svn-revision',
'no-date': 'tag-date'}
def initialize_options(self):
self.egg_name = None
self.egg_version = None
self.egg_base = None
self.egg_info = None
self.tag_build = None
self.tag_svn_revision = 0
self.tag_date = 0
self.broken_egg_info = False
self.vtags = None
def save_version_info(self, filename):
from setuptools.command.setopt import edit_config
values = dict(
egg_info=dict(
tag_svn_revision=0,
tag_date=0,
tag_build=self.tags(),
)
)
edit_config(filename, values)
def finalize_options(self):
self.egg_name = safe_name(self.distribution.get_name())
self.vtags = self.tags()
self.egg_version = self.tagged_version()
try:
list(
parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
)
except ValueError:
raise distutils.errors.DistutilsOptionError(
"Invalid distribution name or version syntax: %s-%s" %
(self.egg_name,self.egg_version)
)
if self.egg_base is None:
dirs = self.distribution.package_dir
self.egg_base = (dirs or {}).get('',os.curdir)
self.ensure_dirname('egg_base')
self.egg_info = to_filename(self.egg_name)+'.egg-info'
if self.egg_base != os.curdir:
self.egg_info = os.path.join(self.egg_base, self.egg_info)
if '-' in self.egg_name: self.check_broken_egg_info()
# Set package version for the benefit of dumber commands
# (e.g. sdist, bdist_wininst, etc.)
#
self.distribution.metadata.version = self.egg_version
# If we bootstrapped around the lack of a PKG-INFO, as might be the
# case in a fresh checkout, make sure that any special tags get added
# to the version info
#
pd = self.distribution._patched_dist
if pd is not None and pd.key==self.egg_name.lower():
pd._version = self.egg_version
pd._parsed_version = parse_version(self.egg_version)
self.distribution._patched_dist = None
def write_or_delete_file(self, what, filename, data, force=False):
"""Write `data` to `filename` or delete if empty
If `data` is non-empty, this routine is the same as ``write_file()``.
If `data` is empty but not ``None``, this is the same as calling
``delete_file(filename)``. If `data` is ``None``, then this is a no-op
unless `filename` exists, in which case a warning is issued about the
orphaned file (if `force` is false), or deleted (if `force` is true).
"""
if data:
self.write_file(what, filename, data)
elif os.path.exists(filename):
if data is None and not force:
log.warn(
"%s not set in setup(), but %s exists", what, filename
)
return
else:
self.delete_file(filename)
def write_file(self, what, filename, data):
"""Write `data` to `filename` (if not a dry run) after announcing it
`what` is used in a log message to identify what is being written
to the file.
"""
log.info("writing %s to %s", what, filename)
if sys.version_info >= (3,):
data = data.encode("utf-8")
if not self.dry_run:
f = open(filename, 'wb')
f.write(data)
f.close()
def delete_file(self, filename):
"""Delete `filename` (if not a dry run) after announcing it"""
log.info("deleting %s", filename)
if not self.dry_run:
os.unlink(filename)
def tagged_version(self):
version = self.distribution.get_version()
# egg_info may be called more than once for a distribution,
# in which case the version string already contains all tags.
if self.vtags and version.endswith(self.vtags):
return safe_version(version)
return safe_version(version + self.vtags)
def run(self):
self.mkpath(self.egg_info)
installer = self.distribution.fetch_build_egg
for ep in iter_entry_points('egg_info.writers'):
writer = ep.load(installer=installer)
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
# Get rid of native_libs.txt if it was put there by older bdist_egg
nl = os.path.join(self.egg_info, "native_libs.txt")
if os.path.exists(nl):
self.delete_file(nl)
self.find_sources()
def tags(self):
version = ''
if self.tag_build:
version+=self.tag_build
if self.tag_svn_revision and (
os.path.exists('.svn') or os.path.exists('PKG-INFO')
): version += '-r%s' % self.get_svn_revision()
if self.tag_date:
import time
version += time.strftime("-%Y%m%d")
return version
@staticmethod
def get_svn_revision():
return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
def find_sources(self):
"""Generate SOURCES.txt manifest file"""
manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
mm = manifest_maker(self.distribution)
mm.manifest = manifest_filename
mm.run()
self.filelist = mm.filelist
def check_broken_egg_info(self):
bei = self.egg_name+'.egg-info'
if self.egg_base != os.curdir:
bei = os.path.join(self.egg_base, bei)
if os.path.exists(bei):
log.warn(
"-"*78+'\n'
"Note: Your current .egg-info directory has a '-' in its name;"
'\nthis will not work correctly with "setup.py develop".\n\n'
'Please rename %s to %s to correct this problem.\n'+'-'*78,
bei, self.egg_info
)
self.broken_egg_info = self.egg_info
self.egg_info = bei # make it work for now
class FileList(_FileList):
"""File list that accepts only existing, platform-independent paths"""
def append(self, item):
if item.endswith('\r'): # Fix older sdists built on Windows
item = item[:-1]
path = convert_path(item)
if sys.version_info >= (3,):
try:
if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
self.files.append(path)
except UnicodeEncodeError:
# Accept UTF-8 filenames even if LANG=C
if os.path.exists(path.encode('utf-8')):
self.files.append(path)
else:
log.warn("'%s' not %s encodable -- skipping", path,
sys.getfilesystemencoding())
else:
if os.path.exists(path):
self.files.append(path)
class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options(self):
self.use_defaults = 1
self.prune = 1
self.manifest_only = 1
self.force_manifest = 1
def finalize_options(self):
pass
def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest() # it must exist so it'll get in the list
self.filelist.findall()
self.add_defaults()
if os.path.exists(self.template):
self.read_template()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest()
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
named by 'self.manifest'.
"""
# The manifest must be UTF-8 encodable. See #303.
if sys.version_info >= (3,):
files = []
for file in self.filelist.files:
try:
file.encode("utf-8")
except UnicodeEncodeError:
log.warn("'%s' not UTF-8 encodable -- skipping" % file)
else:
files.append(file)
self.filelist.files = files
files = self.filelist.files
if os.sep!='/':
files = [f.replace(os.sep,'/') for f in files]
self.execute(write_file, (self.manifest, files),
"writing manifest file '%s'" % self.manifest)
def warn(self, msg): # suppress missing-file warnings from sdist
if not msg.startswith("standard file not found:"):
sdist.warn(self, msg)
def add_defaults(self):
sdist.add_defaults(self)
self.filelist.append(self.template)
self.filelist.append(self.manifest)
rcfiles = list(walk_revctrl())
if rcfiles:
self.filelist.extend(rcfiles)
elif os.path.exists(self.manifest):
self.read_manifest()
ei_cmd = self.get_finalized_command('egg_info')
self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
def prune_file_list(self):
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
def write_file(filename, contents):
"""Create a file with the specified name and write 'contents' (a
sequence of strings without line terminators) to it.
"""
contents = "\n".join(contents)
if sys.version_info >= (3,):
contents = contents.encode("utf-8")
f = open(filename, "wb") # always write POSIX-style manifest
f.write(contents)
f.close()
def write_pkg_info(cmd, basename, filename):
log.info("writing %s", filename)
if not cmd.dry_run:
metadata = cmd.distribution.metadata
metadata.version, oldver = cmd.egg_version, metadata.version
metadata.name, oldname = cmd.egg_name, metadata.name
try:
# write unescaped data to PKG-INFO, so older pkg_resources
# can still parse it
metadata.write_pkg_info(cmd.egg_info)
finally:
metadata.name, metadata.version = oldname, oldver
safe = getattr(cmd.distribution,'zip_safe',None)
from setuptools.command import bdist_egg
bdist_egg.write_safety_flag(cmd.egg_info, safe)
def warn_depends_obsolete(cmd, basename, filename):
if os.path.exists(filename):
log.warn(
"WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
data = ['\n'.join(yield_lines(dist.install_requires or ()))]
for extra,reqs in (dist.extras_require or {}).items():
data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
cmd.write_or_delete_file("requirements", filename, ''.join(data))
def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys(
[
k.split('.',1)[0]
for k in cmd.distribution.iter_distribution_names()
]
)
cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
def overwrite_arg(cmd, basename, filename):
write_arg(cmd, basename, filename, True)
def write_arg(cmd, basename, filename, force=False):
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value)+'\n'
cmd.write_or_delete_file(argname, filename, value, force)
def write_entries(cmd, basename, filename):
ep = cmd.distribution.entry_points
if isinstance(ep,basestring) or ep is None:
data = ep
    else:
data = []
for section, contents in ep.items():
if not isinstance(contents,basestring):
contents = EntryPoint.parse_group(section, contents)
contents = '\n'.join(map(str,contents.values()))
data.append('[%s]\n%s\n\n' % (section,contents))
data = ''.join(data)
cmd.write_or_delete_file('entry points', filename, data, True)
def get_pkg_info_revision():
# See if we can get a -r### off of PKG-INFO, in case this is an sdist of
# a subversion revision
#
    if os.path.exists('PKG-INFO'):
        f = open('PKG-INFO', 'rU')
        try:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
        finally:
            # close the file even when a match causes an early return
            f.close()
    return 0
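# Illustrative: a PKG-INFO line such as "Version: 0.6c11-r88846" yields 88846;
# anything else (or no PKG-INFO file at all) yields 0.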

View File

@@ -0,0 +1,102 @@
import setuptools
import sys
import glob
from distutils.command.install import install as _install
from distutils.errors import DistutilsArgError
class install(_install):
"""Use easy_install to install the package, w/dependencies"""
user_options = _install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
]
boolean_options = _install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',
]
new_commands = [
('install_egg_info', lambda self: True),
('install_scripts', lambda self: True),
]
_nc = dict(new_commands)
def initialize_options(self):
_install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
def finalize_options(self):
_install.finalize_options(self)
if self.root:
self.single_version_externally_managed = True
elif self.single_version_externally_managed:
if not self.root and not self.record:
raise DistutilsArgError(
"You must specify --record or --root when building system"
" packages"
)
def handle_extra_path(self):
if self.root or self.single_version_externally_managed:
# explicit backward-compatibility mode, allow extra_path to work
return _install.handle_extra_path(self)
# Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed)
self.path_file = None
self.extra_dirs = ''
def run(self):
# Explicit request for old-style install? Just do it
if self.old_and_unmanageable or self.single_version_externally_managed:
return _install.run(self)
# Attempt to detect whether we were called from setup() or by another
# command. If we were called by setup(), our caller will be the
# 'run_command' method in 'distutils.dist', and *its* caller will be
# the 'run_commands' method. If we were called any other way, our
# immediate caller *might* be 'run_command', but it won't have been
# called by 'run_commands'. This is slightly kludgy, but seems to
# work.
#
caller = sys._getframe(2)
caller_module = caller.f_globals.get('__name__','')
caller_name = caller.f_code.co_name
if caller_module != 'distutils.dist' or caller_name!='run_commands':
# We weren't called from the command line or setup(), so we
# should run in backward-compatibility mode to support bdist_*
# commands.
_install.run(self)
else:
self.do_egg_install()
def do_egg_install(self):
easy_install = self.distribution.get_command_class('easy_install')
cmd = easy_install(
self.distribution, args="x", root=self.root, record=self.record,
)
cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
cmd.always_copy_from = '.' # make sure local-dir eggs get installed
# pick up setup-dir .egg files only: no .egg-info
cmd.package_index.scan(glob.glob('*.egg'))
self.run_command('bdist_egg')
args = [self.distribution.get_command_obj('bdist_egg').egg_output]
if setuptools.bootstrap_install_from:
# Bootstrap self-installation of setuptools
args.insert(0, setuptools.bootstrap_install_from)
cmd.args = args
cmd.run()
setuptools.bootstrap_install_from = None
# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = [
cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
] + install.new_commands

View File

@@ -0,0 +1,125 @@
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, pkg_resources
class install_egg_info(Command):
"""Install an .egg-info directory for the package"""
description = "Install an .egg-info directory for the package"
user_options = [
('install-dir=', 'd', "directory to install to"),
]
def initialize_options(self):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',('install_dir','install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name()+'.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
def run(self):
self.run_command('egg_info')
        if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink,(self.target,),"Removing "+self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(self.copytree, (),
"Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()
def get_outputs(self):
return self.outputs
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src,dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/','CVS/':
if src.startswith(skip) or '/'+skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp: return
filename,ext = os.path.splitext(self.target)
filename += '-nspkg.pth'; self.outputs.append(filename)
log.info("Installing %s",filename)
if not self.dry_run:
f = open(filename,'wt')
for pkg in nsp:
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
trailer = '\n'
if '.' in pkg:
trailer = (
"; m and setattr(sys.modules[%r], %r, m)\n"
% ('.'.join(pth[:-1]), pth[-1])
)
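                # What the generated line does at interpreter startup (sketch,
                # not the literal code): unless a real __init__.py exists, it
                # creates sys.modules[pkg] as an empty module and appends the
                # install directory to that module's __path__, so the
                # namespace package can span several installation locations.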
f.write(
"import sys,types,os; "
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
"*%(pth)r); "
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
"m = not ie and "
"sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
"(p not in mp) and mp.append(p)%(trailer)s"
% locals()
)
f.close()
def _get_all_ns_packages(self):
nsp = {}
for pkg in self.distribution.namespace_packages or []:
pkg = pkg.split('.')
while pkg:
nsp['.'.join(pkg)] = 1
pkg.pop()
nsp=list(nsp)
nsp.sort() # set up shorter names first
return nsp
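    # Illustrative: namespace_packages=['foo.bar'] yields ['foo', 'foo.bar'],
    # so parent namespaces are written to the -nspkg.pth file before children.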

View File

@@ -0,0 +1,63 @@
from distutils.command.install_lib import install_lib as _install_lib
import os
class install_lib(_install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def run(self):
self.build()
outfiles = self.install()
if outfiles is not None:
# always compile, in case we have any extension stubs to deal with
self.byte_compile(outfiles)
def get_exclusions(self):
exclude = {}
nsp = self.distribution.namespace_packages
svem = (nsp and self.get_finalized_command('install')
.single_version_externally_managed)
if svem:
for pkg in nsp:
parts = pkg.split('.')
while parts:
pkgdir = os.path.join(self.install_dir, *parts)
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
exclude[os.path.join(pkgdir,f)] = 1
parts.pop()
return exclude
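    # Illustrative: with namespace_packages=['foo.bar'] and
    # --single-version-externally-managed, the __init__.py/.pyc/.pyo files of
    # both 'foo' and 'foo.bar' are excluded here; the namespace is stitched
    # together by the -nspkg.pth file written by install_egg_info instead.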
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return _install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
from setuptools.archive_util import unpack_directory
from distutils import log
outfiles = []
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
outfiles.append(dst)
return dst
unpack_directory(infile, outfile, pf)
return outfiles
def get_outputs(self):
outputs = _install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs

View File

@@ -0,0 +1,53 @@
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def initialize_options(self):
_install_scripts.initialize_options(self)
self.no_ep = False
def run(self):
from setuptools.command.easy_install import get_script_args
from setuptools.command.easy_install import sys_executable
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
if self.no_ep:
# don't install entry point scripts into .egg file!
return
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
bs_cmd = self.get_finalized_command('build_scripts')
executable = getattr(bs_cmd,'executable',sys_executable)
is_wininst = getattr(
self.get_finalized_command("bdist_wininst"), '_is_running', False
)
for args in get_script_args(dist, executable, is_wininst):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
from setuptools.command.easy_install import chmod, current_umask
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
chmod(target, 0x1FF-mask) # 0777

View File

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%(name)s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>

View File

@@ -0,0 +1,10 @@
from distutils.command.register import register as _register
class register(_register):
__doc__ = _register.__doc__
def run(self):
# Make sure that we are using valid current name/version info
self.run_command('egg_info')
_register.run(self)

View File

@@ -0,0 +1,58 @@
import os
from setuptools import Command
from setuptools.compat import basestring
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
class rotate(Command):
"""Delete older distributions"""
description = "delete older distributions, keeping N newest files"
user_options = [
('match=', 'm', "patterns to match (required)"),
('dist-dir=', 'd', "directory where the distributions are"),
('keep=', 'k', "number of matching distributions to keep"),
]
boolean_options = []
def initialize_options(self):
self.match = None
self.dist_dir = None
self.keep = None
def finalize_options(self):
if self.match is None:
raise DistutilsOptionError(
"Must specify one or more (comma-separated) match patterns "
"(e.g. '.zip' or '.egg')"
)
if self.keep is None:
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
except ValueError:
raise DistutilsOptionError("--keep must be an integer")
if isinstance(self.match, basestring):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
def run(self):
self.run_command("egg_info")
from glob import glob
for pattern in self.match:
pattern = self.distribution.get_name()+'*'+pattern
files = glob(os.path.join(self.dist_dir,pattern))
files = [(os.path.getmtime(f),f) for f in files]
files.sort()
files.reverse()
log.info("%d file(s) matching %s", len(files), pattern)
files = files[self.keep:]
for (t,f) in files:
log.info("Deleting %s", f)
if not self.dry_run:
os.unlink(f)

View File

@@ -0,0 +1,24 @@
import distutils, os
from setuptools import Command
from setuptools.command.setopt import edit_config, option_base
class saveopts(option_base):
"""Save command-line options to a file"""
description = "save supplied options to setup.cfg or other config file"
def run(self):
dist = self.distribution
settings = {}
for cmd in dist.command_options:
if cmd=='saveopts':
continue # don't save our own options!
for opt,(src,val) in dist.get_option_dict(cmd).items():
if src=="command line":
settings.setdefault(cmd,{})[opt] = val
edit_config(self.filename, settings, self.dry_run)
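# Illustrative: "python setup.py build_ext --inplace saveopts" records the
# build_ext options that were given on the command line under a [build_ext]
# section of setup.cfg (or of the file chosen with --filename/-g/-u).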

View File

@@ -0,0 +1,244 @@
import os
import re
import sys
from glob import glob
import pkg_resources
from distutils.command.sdist import sdist as _sdist
from distutils.util import convert_path
from distutils import log
from setuptools import svn_utils
READMES = ('README', 'README.rst', 'README.txt')
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
for item in ep.load()(dirname):
yield item
#TODO will need test case
class re_finder(object):
"""
Finder that locates files based on entries in a file matched by a
regular expression.
"""
def __init__(self, path, pattern, postproc=lambda x: x):
self.pattern = pattern
self.postproc = postproc
self.entries_path = convert_path(path)
def _finder(self, dirname, filename):
f = open(filename,'rU')
try:
data = f.read()
finally:
f.close()
for match in self.pattern.finditer(data):
path = match.group(1)
# postproc was formerly used when the svn finder
# was an re_finder for calling unescape
path = self.postproc(path)
yield svn_utils.joinpath(dirname, path)
def find(self, dirname=''):
path = svn_utils.joinpath(dirname, self.entries_path)
if not os.path.isfile(path):
# entries file doesn't exist
return
for path in self._finder(dirname,path):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
for item in self.find(path):
yield item
__call__ = find
def _default_revctrl(dirname=''):
'Primary svn_cvs entry point'
for finder in finders:
for item in finder(dirname):
yield item
finders = [
re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
svn_utils.svn_finder,
]
class sdist(_sdist):
"""Smart sdist that finds anything supported by revision control"""
user_options = [
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
negative_opt = {}
def run(self):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
self.check_readme()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# Call check_metadata only if no 'check' command
# (distutils <= 2.6)
import distutils.command
if 'check' not in distutils.command.__all__:
self.check_metadata()
self.make_distribution()
dist_files = getattr(self.distribution,'dist_files',[])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:
dist_files.append(data)
def __read_template_hack(self):
# This grody hack closes the template file (MANIFEST.in) if an
# exception occurs during read_template.
# Doing so prevents an error when easy_install attempts to delete the
# file.
try:
_sdist.read_template(self)
except:
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2,7,2)
or (3,0) <= sys.version_info < (3,1,4)
or (3,2) <= sys.version_info < (3,2,1)
)
if has_leaky_handle:
read_template = __read_template_hack
def add_defaults(self):
standards = [READMES,
self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = 0
for fn in alts:
if os.path.exists(fn):
got_it = 1
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if os.path.exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = list(filter(os.path.isfile, glob(pattern)))
if files:
self.filelist.extend(files)
# getting python files
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
# This functionality is incompatible with include_package_data, and
# will in fact create an infinite recursion if include_package_data
# is True. Use of include_package_data will imply that
# distutils-style automatic handling of package_data is disabled
if not self.distribution.include_package_data:
for _, src_dir, _, filenames in build_py.data_files:
self.filelist.extend([os.path.join(src_dir, filename)
for filename in filenames])
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
def check_readme(self):
for f in READMES:
if os.path.exists(f):
return
else:
self.warn(
"standard file not found: should have one of " +', '.join(READMES)
)
def make_release_tree(self, base_dir, files):
_sdist.make_release_tree(self, base_dir, files)
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os,'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)
self.copy_file('setup.cfg', dest)
self.get_finalized_command('egg_info').save_version_info(dest)
def _manifest_is_not_generated(self):
# check for special comment used in 2.7.1 and higher
if not os.path.isfile(self.manifest):
return False
fp = open(self.manifest, 'rbU')
try:
first_line = fp.readline()
finally:
fp.close()
return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest, 'rbU')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if sys.version_info >= (3,):
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
log.warn("%r not UTF-8 decodable -- skipping" % line)
continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()

View File

@@ -0,0 +1,145 @@
import os
import distutils
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
def config_file(kind="local"):
"""Get the filename of the distutils, local, global, or per-user config
`kind` must be one of "local", "global", or "user"
"""
if kind=='local':
return 'setup.cfg'
if kind=='global':
return os.path.join(
os.path.dirname(distutils.__file__),'distutils.cfg'
)
if kind=='user':
        dot = '.' if os.name == 'posix' else ''
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
raise ValueError(
"config_file() type must be 'local', 'global', or 'user'", kind
)
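# Illustrative: config_file('user') resolves to ~/.pydistutils.cfg on POSIX and
# ~/pydistutils.cfg elsewhere; config_file('global') is the distutils.cfg that
# sits alongside the installed distutils package.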
def edit_config(filename, settings, dry_run=False):
"""Edit a configuration file to include `settings`
`settings` is a dictionary of dictionaries or ``None`` values, keyed by
command/section name. A ``None`` value means to delete the entire section,
while a dictionary lists settings to be changed or deleted in that section.
A setting of ``None`` means to delete that setting.
"""
from setuptools.compat import ConfigParser
log.debug("Reading configuration from %s", filename)
opts = ConfigParser.RawConfigParser()
opts.read([filename])
for section, options in settings.items():
if options is None:
log.info("Deleting section [%s] from %s", section, filename)
opts.remove_section(section)
else:
if not opts.has_section(section):
log.debug("Adding new section [%s] to %s", section, filename)
opts.add_section(section)
for option,value in options.items():
if value is None:
log.debug(
"Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section,option)
if not opts.options(section):
log.info("Deleting empty [%s] section from %s",
section, filename)
opts.remove_section(section)
else:
log.debug(
"Setting %s.%s to %r in %s",
section, option, value, filename
)
opts.set(section,option,value)
log.info("Writing %s", filename)
if not dry_run:
with open(filename, 'w') as f:
opts.write(f)
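# Illustrative calls (section/option names are examples only):
#   edit_config('setup.cfg', {'easy_install': {'index_url': 'https://example.invalid/simple'}})
#   edit_config('setup.cfg', {'easy_install': {'index_url': None}})  # delete one option
#   edit_config('setup.cfg', {'alias': None})                        # delete a whole section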
class option_base(Command):
"""Abstract base class for commands that mess with config files"""
user_options = [
('global-config', 'g',
"save options to the site-wide distutils.cfg file"),
('user-config', 'u',
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
]
boolean_options = [
'global-config', 'user-config',
]
def initialize_options(self):
self.global_config = None
self.user_config = None
self.filename = None
def finalize_options(self):
filenames = []
if self.global_config:
filenames.append(config_file('global'))
if self.user_config:
filenames.append(config_file('user'))
if self.filename is not None:
filenames.append(self.filename)
if not filenames:
filenames.append(config_file('local'))
if len(filenames)>1:
raise DistutilsOptionError(
"Must specify only one configuration file option",
filenames
)
self.filename, = filenames
class setopt(option_base):
"""Save command-line options to a file"""
description = "set an option in setup.cfg or another config file"
user_options = [
('command=', 'c', 'command to set an option for'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.command = None
self.option = None
self.set_value = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.command is None or self.option is None:
raise DistutilsOptionError("Must specify --command *and* --option")
if self.set_value is None and not self.remove:
raise DistutilsOptionError("Must specify --set-value or --remove")
def run(self):
edit_config(
self.filename, {
self.command: {self.option.replace('-','_'):self.set_value}
},
self.dry_run
)
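# Illustrative command line (option names are examples only):
#   python setup.py setopt -c build_ext -o include-dirs -s /usr/local/include
# writes include_dirs under [build_ext] in setup.cfg; using --remove instead of
# --set-value deletes that option.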

View File

@@ -0,0 +1,161 @@
from setuptools import Command
from distutils.errors import DistutilsOptionError
import sys
from pkg_resources import (resource_listdir, resource_exists,
normalize_path, working_set, _namespace_packages, add_activation_listener,
require, EntryPoint)
from unittest import TestLoader
class ScanningLoader(TestLoader):
def loadTestsFromModule(self, module):
"""Return a suite of all tests cases contained in the given module
If the module is a package, load tests from all the modules in it.
If the module has an ``additional_tests`` function, call it and add
the return value to the tests.
"""
tests = []
if module.__name__!='setuptools.tests.doctest': # ugh
tests.append(TestLoader.loadTestsFromModule(self,module))
if hasattr(module, "additional_tests"):
tests.append(module.additional_tests())
if hasattr(module, '__path__'):
for file in resource_listdir(module.__name__, ''):
if file.endswith('.py') and file!='__init__.py':
submodule = module.__name__+'.'+file[:-3]
else:
if resource_exists(
module.__name__, file+'/__init__.py'
):
submodule = module.__name__+'.'+file
else:
continue
tests.append(self.loadTestsFromName(submodule))
if len(tests)!=1:
return self.suiteClass(tests)
else:
return tests[0] # don't create a nested suite for only one return
class test(Command):
"""Command to run unit tests after in-place build"""
description = "run unit tests after in-place build"
user_options = [
('test-module=','m', "Run 'test_suite' in specified module"),
('test-suite=','s',
"Test suite to run (e.g. 'some_module.test_suite')"),
]
def initialize_options(self):
self.test_suite = None
self.test_module = None
self.test_loader = None
def finalize_options(self):
if self.test_suite is None:
if self.test_module is None:
self.test_suite = self.distribution.test_suite
else:
self.test_suite = self.test_module+".test_suite"
elif self.test_module:
raise DistutilsOptionError(
"You may specify a module or a suite, but not both"
)
self.test_args = [self.test_suite]
if self.verbose:
self.test_args.insert(0,'--verbose')
if self.test_loader is None:
self.test_loader = getattr(self.distribution,'test_loader',None)
if self.test_loader is None:
self.test_loader = "setuptools.command.test:ScanningLoader"
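    # Illustrative: "python setup.py test -m mypkg.tests" is shorthand for
    # test_suite='mypkg.tests.test_suite'; with neither option given, the
    # distribution's own test_suite setting is used.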
def with_project_on_sys_path(self, func):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date
self.reinitialize_command('build_py', inplace=0)
self.run_command('build_py')
bpy_cmd = self.get_finalized_command("build_py")
build_path = normalize_path(bpy_cmd.build_lib)
# Build extensions
self.reinitialize_command('egg_info', egg_base=build_path)
self.run_command('egg_info')
self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
ei_cmd = self.get_finalized_command("egg_info")
old_path = sys.path[:]
old_modules = sys.modules.copy()
try:
sys.path.insert(0, normalize_path(ei_cmd.egg_base))
working_set.__init__()
add_activation_listener(lambda dist: dist.activate())
require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
func()
finally:
sys.path[:] = old_path
sys.modules.clear()
sys.modules.update(old_modules)
working_set.__init__()
def run(self):
if self.distribution.install_requires:
self.distribution.fetch_build_eggs(self.distribution.install_requires)
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
if self.test_suite:
cmd = ' '.join(self.test_args)
if self.dry_run:
self.announce('skipping "unittest %s" (dry run)' % cmd)
else:
self.announce('running "unittest %s"' % cmd)
self.with_project_on_sys_path(self.run_tests)
def run_tests(self):
import unittest
# Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used
# with namespace packages.
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
module = self.test_args[-1].split('.')[0]
if module in _namespace_packages:
del_modules = []
if module in sys.modules:
del_modules.append(module)
module += '.'
for name in sys.modules:
if name.startswith(module):
del_modules.append(name)
list(map(sys.modules.__delitem__, del_modules))
loader_ep = EntryPoint.parse("x="+self.test_loader)
loader_class = loader_ep.load(require=False)
cks = loader_class()
unittest.main(
None, None, [unittest.__file__]+self.test_args,
testLoader = cks
)

View File

@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
import os
import socket
import zipfile
import tempfile
import sys
import shutil
from base64 import standard_b64encode
from pkg_resources import iter_entry_points
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
errors = 'surrogateescape' if PY3 else 'strict'
# This is not just a replacement for byte literals
# but works as a general purpose encoder
def b(s, encoding='utf-8'):
if isinstance(s, unicode):
return s.encode(encoding, errors)
return s
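# Illustrative: b('abc') == b'abc' on both Python 2 and 3, while objects that
# are already byte strings are returned unchanged.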
class upload_docs(upload):
description = 'Upload documentation to PyPI'
user_options = [
('repository=', 'r',
"url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
('show-response', None,
'display full response text from server'),
('upload-dir=', None, 'directory to upload'),
]
boolean_options = upload.boolean_options
def has_sphinx(self):
if self.upload_dir is None:
for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
return True
sub_commands = [('build_sphinx', has_sphinx)]
def initialize_options(self):
upload.initialize_options(self)
self.upload_dir = None
self.target_dir = None
def finalize_options(self):
upload.finalize_options(self)
if self.upload_dir is None:
if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx')
self.target_dir = build_sphinx.builder_target_dir
else:
build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs')
else:
self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir
self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename):
zip_file = zipfile.ZipFile(filename, "w")
try:
self.mkpath(self.target_dir) # just in case
for root, dirs, files in os.walk(self.target_dir):
if root == self.target_dir and not files:
raise DistutilsOptionError(
"no files found in upload directory '%s'"
% self.target_dir)
for name in files:
full = os.path.join(root, name)
relative = root[len(self.target_dir):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
finally:
zip_file.close()
def run(self):
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
tmp_dir = tempfile.mkdtemp()
name = self.distribution.metadata.get_name()
zip_file = os.path.join(tmp_dir, "%s.zip" % name)
try:
self.create_zipfile(zip_file)
self.upload_file(zip_file)
finally:
shutil.rmtree(tmp_dir)
def upload_file(self, filename):
f = open(filename, 'rb')
content = f.read()
f.close()
meta = self.distribution.metadata
data = {
':action': 'doc_upload',
'name': meta.get_name(),
'content': (os.path.basename(filename), content),
}
# set up the authentication
credentials = b(self.username + ':' + self.password)
credentials = standard_b64encode(credentials)
if PY3:
credentials = credentials.decode('ascii')
auth = "Basic " + credentials
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b('\n--') + b(boundary)
end_boundary = sep_boundary + b('--')
body = []
for key, values in iteritems(data):
title = '\nContent-Disposition: form-data; name="%s"' % key
# handle multiple entries for the same name
if not isinstance(values, list):
values = [values]
for value in values:
if type(value) is tuple:
title += '; filename="%s"' % value[0]
value = value[1]
else:
value = b(value)
body.append(sep_boundary)
body.append(b(title))
body.append(b("\n\n"))
body.append(value)
if value and value[-1:] == b('\r'):
body.append(b('\n')) # write an extra newline (lurve Macs)
body.append(end_boundary)
body.append(b("\n"))
body = b('').join(body)
self.announce("Submitting documentation to %s" % (self.repository),
log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
conn = httplib.HTTPConnection(netloc)
elif schema == 'https':
conn = httplib.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema "+schema)
data = ''
try:
conn.connect()
conn.putrequest("POST", url)
content_type = 'multipart/form-data; boundary=%s' % boundary
conn.putheader('Content-type', content_type)
conn.putheader('Content-length', str(len(body)))
conn.putheader('Authorization', auth)
conn.endheaders()
conn.send(body)
except socket.error:
e = sys.exc_info()[1]
self.announce(str(e), log.ERROR)
return
r = conn.getresponse()
if r.status == 200:
self.announce('Server response (%s): %s' % (r.status, r.reason),
log.INFO)
elif r.status == 301:
location = r.getheader('Location')
if location is None:
location = 'https://pythonhosted.org/%s/' % meta.get_name()
self.announce('Upload successful. Visit %s' % location,
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print('-'*75, r.read(), '-'*75)

View File

@@ -0,0 +1,83 @@
import sys
import itertools
if sys.version_info[0] < 3:
PY3 = False
basestring = basestring
import __builtin__ as builtins
import ConfigParser
from StringIO import StringIO
BytesIO = StringIO
execfile = execfile
func_code = lambda o: o.func_code
func_globals = lambda o: o.func_globals
im_func = lambda o: o.im_func
from htmlentitydefs import name2codepoint
import httplib
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import BaseHTTPRequestHandler
iteritems = lambda o: o.iteritems()
long_type = long
maxsize = sys.maxint
next = lambda o: o.next()
numeric_types = (int, long, float)
unichr = unichr
unicode = unicode
bytes = str
from urllib import url2pathname, splittag, pathname2url
import urllib2
from urllib2 import urlopen, HTTPError, URLError, unquote, splituser
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
filterfalse = itertools.ifilterfalse
exec("""def reraise(tp, value, tb=None):
raise tp, value, tb""")
else:
PY3 = True
basestring = str
import builtins
import configparser as ConfigParser
from io import StringIO, BytesIO
func_code = lambda o: o.__code__
func_globals = lambda o: o.__globals__
im_func = lambda o: o.__func__
from html.entities import name2codepoint
import http.client as httplib
from http.server import HTTPServer, SimpleHTTPRequestHandler
from http.server import BaseHTTPRequestHandler
iteritems = lambda o: o.items()
long_type = int
maxsize = sys.maxsize
next = next
numeric_types = (int, float)
unichr = chr
unicode = str
bytes = bytes
from urllib.error import HTTPError, URLError
import urllib.request as urllib2
from urllib.request import urlopen, url2pathname, pathname2url
from urllib.parse import (
urlparse, urlunparse, unquote, splituser, urljoin, urlsplit,
urlunsplit, splittag,
)
filterfalse = itertools.filterfalse
def execfile(fn, globs=None, locs=None):
if globs is None:
globs = globals()
if locs is None:
locs = globs
f = open(fn, 'rb')
try:
source = f.read()
finally:
f.close()
exec(compile(source, fn, 'exec'), globs, locs)
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value

View File

@@ -0,0 +1,246 @@
from __future__ import generators
import sys, imp, marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion, LooseVersion
from setuptools.compat import long_type  # long on Python 2, int on Python 3
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self,name,requested_version,module,homepage='',
attribute=None,format=None
):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name,self.requested_version)
return self.name
def version_ok(self,version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f,p,i = find_module(self.module,paths)
if f: f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module,self.attribute,default,paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self,paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self,paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
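# Illustrative: Require('Docutils', '0.3', 'docutils') is satisfied when the
# 'docutils' module can be found and its __version__ attribute parses (via the
# default StrictVersion format) to at least 0.3.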
def _iter_code(code):
"""Yield '(op,arg)' pair for each operation in code object 'code'"""
from array import array
from dis import HAVE_ARGUMENT, EXTENDED_ARG
bytes = array('b',code.co_code)
eof = len(code.co_code)
ptr = 0
extended_arg = 0
while ptr<eof:
op = bytes[ptr]
if op>=HAVE_ARGUMENT:
arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
ptr += 3
if op==EXTENDED_ARG:
extended_arg = arg * long_type(65536)
continue
else:
arg = None
ptr += 1
yield op,arg
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
if kind==PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts,module))
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix,mode,kind) = find_module(module,paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind==PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind==PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind==PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module,f,path,(suffix,mode,kind))
return getattr(sys.modules[module],symbol,None)
finally:
if f:
f.close()
return extract_constant(code,symbol,default)
def extract_constant(code,symbol,default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for op, arg in _iter_code(code):
if op==LOAD_CONST:
const = code.co_consts[arg]
elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
return const
else:
const = default
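# Illustrative (assumes the pre-3.6 bytecode layout that _iter_code decodes):
#   extract_constant(compile("__version__ = '1.0'", '<s>', 'exec'), '__version__')
# returns '1.0', because a LOAD_CONST of '1.0' immediately precedes the
# STORE_NAME of '__version__'.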
if sys.platform.startswith('java') or sys.platform == 'cli':
# XXX it'd be better to test assertions about bytecode instead...
del extract_constant, get_module_constant
__all__.remove('extract_constant')
__all__.remove('get_module_constant')

View File

@@ -0,0 +1,797 @@
__all__ = ['Distribution']
import re
import os
import sys
import warnings
import distutils.log
import distutils.core
import distutils.cmd
from distutils.core import Distribution as _Distribution
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
DistutilsSetupError)
from setuptools.depends import Require
from setuptools.compat import numeric_types, basestring
import pkg_resources
def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded
Also ensures that no other distutils extension monkeypatched the distutils
first.
"""
while cls.__module__.startswith('setuptools'):
cls, = cls.__bases__
if not cls.__module__.startswith('distutils'):
raise AssertionError(
"distutils has already been patched by %r" % cls
)
return cls
_Distribution = _get_unpatched(_Distribution)
sequence = tuple, list
def check_importable(dist, attr, value):
try:
ep = pkg_resources.EntryPoint.parse('x='+value)
assert not ep.extras
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be importable 'module:attrs' string (got %r)"
% (attr,value)
)
def assert_string_list(dist, attr, value):
"""Verify that value is a string list or None"""
try:
assert ''.join(value)!=value
except (TypeError,ValueError,AttributeError,AssertionError):
raise DistutilsSetupError(
"%r must be a list of strings (got %r)" % (attr,value)
)
def check_nsp(dist, attr, value):
"""Verify that namespace packages are valid"""
assert_string_list(dist,attr,value)
for nsp in value:
if not dist.has_contents_for(nsp):
raise DistutilsSetupError(
"Distribution contains no modules or packages for " +
"namespace package %r" % nsp
)
if '.' in nsp:
parent = '.'.join(nsp.split('.')[:-1])
if parent not in value:
distutils.log.warn(
"WARNING: %r is declared as a package namespace, but %r"
" is not: please correct this in setup.py", nsp, parent
)
def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
for k,v in value.items():
if ':' in k:
k,m = k.split(':',1)
if pkg_resources.invalid_marker(m):
raise DistutilsSetupError("Invalid environment marker: "+m)
list(pkg_resources.parse_requirements(v))
except (TypeError,ValueError,AttributeError):
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
)
def assert_bool(dist, attr, value):
"""Verify that value is True, False, 0, or 1"""
if bool(value) != value:
raise DistutilsSetupError(
"%r must be a boolean value (got %r)" % (attr,value)
)
def check_requirements(dist, attr, value):
"""Verify that install_requires is a valid requirements list"""
try:
list(pkg_resources.parse_requirements(value))
except (TypeError,ValueError):
raise DistutilsSetupError(
"%r must be a string or list of strings "
"containing valid project/version requirement specifiers" % (attr,)
)
def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
pkg_resources.EntryPoint.parse_map(value)
except ValueError:
e = sys.exc_info()[1]
raise DistutilsSetupError(e)
def check_test_suite(dist, attr, value):
if not isinstance(value,basestring):
raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
"""Verify that value is a dictionary of package names to glob lists"""
if isinstance(value,dict):
for k,v in value.items():
if not isinstance(k,str): break
try: iter(v)
except TypeError:
break
else:
return
raise DistutilsSetupError(
attr+" must be a dictionary mapping package names to lists of "
"wildcard patterns"
)
def check_packages(dist, attr, value):
for pkgname in value:
if not re.match(r'\w+(\.\w+)*', pkgname):
distutils.log.warn(
"WARNING: %r not a valid package name; please use only"
".-separated package names in setup.py", pkgname
)
class Distribution(_Distribution):
"""Distribution with support for features, tests, and package data
This is an enhanced version of 'distutils.dist.Distribution' that
effectively adds the following new optional keyword arguments to 'setup()':
'install_requires' -- a string or sequence of strings specifying project
versions that the distribution requires when installed, in the format
used by 'pkg_resources.require()'. They will be installed
automatically when the package is installed. If you wish to use
packages that are not available in PyPI, or want to give your users an
alternate download location, you can add a 'find_links' option to the
'[easy_install]' section of your project's 'setup.cfg' file, and then
setuptools will scan the listed web pages for links that satisfy the
requirements.
'extras_require' -- a dictionary mapping names of optional "extras" to the
additional requirement(s) that using those extras incurs. For example,
this::
extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
indicates that the distribution can optionally provide an extra
capability called "reST", but it can only be used if docutils and
reSTedit are installed. If the user installs your package using
EasyInstall and requests one of your extras, the corresponding
additional requirements will be installed if needed.
'features' **deprecated** -- a dictionary mapping option names to
'setuptools.Feature'
objects. Features are a portion of the distribution that can be
included or excluded based on user options, inter-feature dependencies,
and availability on the current system. Excluded features are omitted
from all setup commands, including source and binary distributions, so
you can create multiple distributions from the same source tree.
Feature names should be valid Python identifiers, except that they may
contain the '-' (minus) sign. Features can be included or excluded
via the command line options '--with-X' and '--without-X', where 'X' is
the name of the feature. Whether a feature is included by default, and
whether you are allowed to control this from the command line, is
determined by the Feature object. See the 'Feature' class for more
information.
'test_suite' -- the name of a test suite to run for the 'test' command.
If the user runs 'python setup.py test', the package will be installed,
and the named test suite will be run. The format is the same as
would be used on a 'unittest.py' command line. That is, it is the
dotted name of an object to import and call to generate a test suite.
'package_data' -- a dictionary mapping package names to lists of filenames
or globs to use to find data files contained in the named packages.
If the dictionary has filenames or globs listed under '""' (the empty
string), those names will be searched for in every package, in addition
to any names for the specific package. Data files found using these
names/globs will be installed along with the package, in the same
location as the package. Note that globs are allowed to reference
the contents of non-package subdirectories, as long as you use '/' as
a path separator. (Globs are automatically converted to
platform-specific paths at runtime.)
In addition to these new keywords, this class also has several new methods
for manipulating the distribution's contents. For example, the 'include()'
and 'exclude()' methods can be thought of as in-place add and subtract
commands that add or remove packages, modules, extensions, and so on from
the distribution. They are used by the feature subsystem to configure the
distribution for the included and excluded features.
"""
_patched_dist = None
def patch_missing_pkg_info(self, attrs):
# Fake up a replacement for the data that would normally come from
# PKG-INFO, but which might not yet be built if this is a fresh
# checkout.
#
if not attrs or 'name' not in attrs or 'version' not in attrs:
return
key = pkg_resources.safe_name(str(attrs['name'])).lower()
dist = pkg_resources.working_set.by_key.get(key)
if dist is not None and not dist.has_metadata('PKG-INFO'):
dist._version = pkg_resources.safe_version(str(attrs['version']))
self._patched_dist = dist
def __init__(self, attrs=None):
have_package_data = hasattr(self, "package_data")
if not have_package_data:
self.package_data = {}
_attrs_dict = attrs or {}
if 'features' in _attrs_dict or 'require_features' in _attrs_dict:
Feature.warn_deprecated()
self.require_features = []
self.features = {}
self.dist_files = []
self.src_root = attrs and attrs.pop("src_root", None)
self.patch_missing_pkg_info(attrs)
# Make sure we have any eggs needed to interpret 'attrs'
if attrs is not None:
self.dependency_links = attrs.pop('dependency_links', [])
assert_string_list(self,'dependency_links',self.dependency_links)
if attrs and 'setup_requires' in attrs:
self.fetch_build_eggs(attrs.pop('setup_requires'))
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
if not hasattr(self,ep.name):
setattr(self,ep.name,None)
_Distribution.__init__(self,attrs)
if isinstance(self.metadata.version, numeric_types):
# Some people apparently take "version number" too literally :)
self.metadata.version = str(self.metadata.version)
def parse_command_line(self):
"""Process features after parsing command line options"""
result = _Distribution.parse_command_line(self)
if self.features:
self._finalize_features()
return result
def _feature_attrname(self,name):
"""Convert feature name to corresponding option attribute name"""
return 'with_'+name.replace('-','_')
def fetch_build_eggs(self, requires):
"""Resolve pre-setup requirements"""
from pkg_resources import working_set, parse_requirements
for dist in working_set.resolve(
parse_requirements(requires), installer=self.fetch_build_egg,
replace_conflicting=True
):
working_set.add(dist, replace=True)
def finalize_options(self):
_Distribution.finalize_options(self)
if self.features:
self._set_global_opts_from_features()
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
value = getattr(self,ep.name,None)
if value is not None:
ep.require(installer=self.fetch_build_egg)
ep.load()(self, ep.name, value)
if getattr(self, 'convert_2to3_doctests', None):
# XXX may convert to set here when we can rely on set being builtin
self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
else:
self.convert_2to3_doctests = []
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
try:
cmd = self._egg_fetcher
cmd.package_index.to_scan = []
except AttributeError:
from setuptools.command.easy_install import easy_install
dist = self.__class__({'script_args':['easy_install']})
dist.parse_config_files()
opts = dist.get_option_dict('easy_install')
            keep = (
                'find_links', 'site_dirs', 'index_url', 'optimize',
                'allow_hosts',
            )
for key in list(opts):
if key not in keep:
del opts[key] # don't use any other settings
if self.dependency_links:
links = self.dependency_links[:]
if 'find_links' in opts:
links = opts['find_links'][1].split() + links
opts['find_links'] = ('setup', links)
cmd = easy_install(
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report=True, user=False
)
cmd.ensure_finalized()
self._egg_fetcher = cmd
return cmd.easy_install(req)
def _set_global_opts_from_features(self):
"""Add --with-X/--without-X options based on optional features"""
go = []
no = self.negative_opt.copy()
for name,feature in self.features.items():
self._set_feature(name,None)
feature.validate(self)
if feature.optional:
descr = feature.description
incdef = ' (default)'
excdef=''
if not feature.include_by_default():
excdef, incdef = incdef, excdef
go.append(('with-'+name, None, 'include '+descr+incdef))
go.append(('without-'+name, None, 'exclude '+descr+excdef))
no['without-'+name] = 'with-'+name
self.global_options = self.feature_options = go + self.global_options
self.negative_opt = self.feature_negopt = no
def _finalize_features(self):
"""Add/remove features and resolve dependencies between them"""
# First, flag all the enabled items (and thus their dependencies)
for name,feature in self.features.items():
enabled = self.feature_is_included(name)
if enabled or (enabled is None and feature.include_by_default()):
feature.include_in(self)
self._set_feature(name,1)
# Then disable the rest, so that off-by-default features don't
# get flagged as errors when they're required by an enabled feature
for name,feature in self.features.items():
if not self.feature_is_included(name):
feature.exclude_from(self)
self._set_feature(name,0)
def get_command_class(self, command):
"""Pluggable version of get_command_class()"""
if command in self.cmdclass:
return self.cmdclass[command]
for ep in pkg_resources.iter_entry_points('distutils.commands',command):
ep.require(installer=self.fetch_build_egg)
self.cmdclass[command] = cmdclass = ep.load()
return cmdclass
else:
return _Distribution.get_command_class(self, command)
def print_commands(self):
for ep in pkg_resources.iter_entry_points('distutils.commands'):
if ep.name not in self.cmdclass:
cmdclass = ep.load(False) # don't require extras, we're not running
self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self)
def _set_feature(self,name,status):
"""Set feature's inclusion status"""
setattr(self,self._feature_attrname(name),status)
def feature_is_included(self,name):
"""Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
return getattr(self,self._feature_attrname(name))
def include_feature(self,name):
"""Request inclusion of feature named 'name'"""
if self.feature_is_included(name)==0:
descr = self.features[name].description
raise DistutilsOptionError(
descr + " is required, but was excluded or is not available"
)
self.features[name].include_in(self)
self._set_feature(name,1)
def include(self,**attrs):
"""Add items to distribution that are named in keyword arguments
        For example, 'dist.include(py_modules=["x"])' would add 'x' to
the distribution's 'py_modules' attribute, if it was not already
there.
Currently, this method only supports inclusion for attributes that are
lists or tuples. If you need to add support for adding to other
attributes in this or a subclass, you can add an '_include_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
will try to call 'dist._include_foo({"bar":"baz"})', which can then
handle whatever special inclusion logic is needed.
"""
for k,v in attrs.items():
include = getattr(self, '_include_'+k, None)
if include:
include(v)
else:
self._include_misc(k,v)
def exclude_package(self,package):
"""Remove packages, modules, and extensions in named package"""
pfx = package+'.'
if self.packages:
self.packages = [
p for p in self.packages
if p != package and not p.startswith(pfx)
]
if self.py_modules:
self.py_modules = [
p for p in self.py_modules
if p != package and not p.startswith(pfx)
]
if self.ext_modules:
self.ext_modules = [
p for p in self.ext_modules
if p.name != package and not p.name.startswith(pfx)
]
def has_contents_for(self,package):
"""Return true if 'exclude_package(package)' would do something"""
pfx = package+'.'
for p in self.iter_distribution_names():
if p==package or p.startswith(pfx):
return True
def _exclude_misc(self,name,value):
"""Handle 'exclude()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list or tuple (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is not None and not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
elif old:
setattr(self,name,[item for item in old if item not in value])
def _include_misc(self,name,value):
"""Handle 'include()' for list/tuple attrs without a special handler"""
if not isinstance(value,sequence):
raise DistutilsSetupError(
"%s: setting must be a list (%r)" % (name, value)
)
try:
old = getattr(self,name)
except AttributeError:
raise DistutilsSetupError(
"%s: No such distribution setting" % name
)
if old is None:
setattr(self,name,value)
elif not isinstance(old,sequence):
raise DistutilsSetupError(
name+": this setting cannot be changed via include/exclude"
)
else:
setattr(self,name,old+[item for item in value if item not in old])
def exclude(self,**attrs):
"""Remove items from distribution that are named in keyword arguments
For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
the distribution's 'py_modules' attribute. Excluding packages uses
the 'exclude_package()' method, so all of the package's contained
packages, modules, and extensions are also excluded.
Currently, this method only supports exclusion from attributes that are
lists or tuples. If you need to add support for excluding from other
attributes in this or a subclass, you can add an '_exclude_X' method,
where 'X' is the name of the attribute. The method will be called with
the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
handle whatever special exclusion logic is needed.
"""
for k,v in attrs.items():
exclude = getattr(self, '_exclude_'+k, None)
if exclude:
exclude(v)
else:
self._exclude_misc(k,v)
def _exclude_packages(self,packages):
if not isinstance(packages,sequence):
raise DistutilsSetupError(
"packages: setting must be a list or tuple (%r)" % (packages,)
)
list(map(self.exclude_package, packages))
def _parse_command_opts(self, parser, args):
# Remove --with-X/--without-X options when processing command args
self.global_options = self.__class__.global_options
self.negative_opt = self.__class__.negative_opt
# First, expand any aliases
command = args[0]
aliases = self.get_option_dict('aliases')
while command in aliases:
src,alias = aliases[command]
del aliases[command] # ensure each alias can expand only once!
import shlex
args[:1] = shlex.split(alias,True)
command = args[0]
nargs = _Distribution._parse_command_opts(self, parser, args)
# Handle commands that want to consume all remaining arguments
cmd_class = self.get_command_class(command)
if getattr(cmd_class,'command_consumes_arguments',None):
self.get_option_dict(command)['args'] = ("command line", nargs)
if nargs is not None:
return []
return nargs
def get_cmdline_options(self):
"""Return a '{cmd: {opt:val}}' map of all command-line options
Option names are all long, but do not include the leading '--', and
contain dashes rather than underscores. If the option doesn't take
an argument (e.g. '--quiet'), the 'val' is 'None'.
Note that options provided by config files are intentionally excluded.
"""
d = {}
for cmd,opts in self.command_options.items():
for opt,(src,val) in opts.items():
if src != "command line":
continue
opt = opt.replace('_','-')
if val==0:
cmdobj = self.get_command_obj(cmd)
neg_opt = self.negative_opt.copy()
neg_opt.update(getattr(cmdobj,'negative_opt',{}))
for neg,pos in neg_opt.items():
if pos==opt:
opt=neg
val=None
break
else:
raise AssertionError("Shouldn't be able to get here")
elif val==1:
val = None
d.setdefault(cmd,{})[opt] = val
return d
def iter_distribution_names(self):
"""Yield all packages, modules, and extension names in distribution"""
for pkg in self.packages or ():
yield pkg
for module in self.py_modules or ():
yield module
for ext in self.ext_modules or ():
if isinstance(ext,tuple):
name, buildinfo = ext
else:
name = ext.name
if name.endswith('module'):
name = name[:-6]
yield name
def handle_display_options(self, option_order):
"""If there were any non-global "display-only" options
(--help-commands or the metadata display options) on the command
line, display the requested info and return true; else return
false.
"""
import sys
if sys.version_info < (3,) or self.help_commands:
return _Distribution.handle_display_options(self, option_order)
# Stdout may be StringIO (e.g. in tests)
import io
if not isinstance(sys.stdout, io.TextIOWrapper):
return _Distribution.handle_display_options(self, option_order)
# Don't wrap stdout if utf-8 is already the encoding. Provides
# workaround for #334.
if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
return _Distribution.handle_display_options(self, option_order)
# Print metadata in UTF-8 no matter the platform
encoding = sys.stdout.encoding
errors = sys.stdout.errors
newline = sys.platform != 'win32' and '\n' or None
line_buffering = sys.stdout.line_buffering
sys.stdout = io.TextIOWrapper(
sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
try:
return _Distribution.handle_display_options(self, option_order)
finally:
sys.stdout = io.TextIOWrapper(
sys.stdout.detach(), encoding, errors, newline, line_buffering)
# Install it throughout the distutils
for module in distutils.dist, distutils.core, distutils.cmd:
module.Distribution = Distribution
class Feature:
"""
**deprecated** -- The `Feature` facility was never completely implemented
or supported, `has reported issues
<https://bitbucket.org/pypa/setuptools/issue/58>`_ and will be removed in
a future version.
    A subset of the distribution that can be excluded if unneeded/unwanted
Features are created using these keyword arguments:
'description' -- a short, human readable description of the feature, to
be used in error messages, and option help messages.
'standard' -- if true, the feature is included by default if it is
available on the current system. Otherwise, the feature is only
included if requested via a command line '--with-X' option, or if
another included feature requires it. The default setting is 'False'.
'available' -- if true, the feature is available for installation on the
current system. The default setting is 'True'.
'optional' -- if true, the feature's inclusion can be controlled from the
command line, using the '--with-X' or '--without-X' options. If
false, the feature's inclusion status is determined automatically,
        based on 'available', 'standard', and whether any other feature
requires it. The default setting is 'True'.
'require_features' -- a string or sequence of strings naming features
that should also be included if this feature is included. Defaults to
empty list. May also contain 'Require' objects that should be
added/removed from the distribution.
'remove' -- a string or list of strings naming packages to be removed
from the distribution if this feature is *not* included. If the
feature *is* included, this argument is ignored. This argument exists
to support removing features that "crosscut" a distribution, such as
defining a 'tests' feature that removes all the 'tests' subpackages
provided by other features. The default for this argument is an empty
list. (Note: the named package(s) or modules must exist in the base
distribution when the 'setup()' function is initially called.)
other keywords -- any other keyword arguments are saved, and passed to
the distribution's 'include()' and 'exclude()' methods when the
feature is included or excluded, respectively. So, for example, you
could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
added or removed from the distribution as appropriate.
    A feature must include at least one 'require_features', 'remove', or other
keyword argument. Otherwise, it can't affect the distribution in any way.
Note also that you can subclass 'Feature' to create your own specialized
feature types that modify the distribution in other ways when included or
excluded. See the docstrings for the various methods here for more detail.
Aside from the methods, the only feature attributes that distributions look
at are 'description' and 'optional'.
"""
@staticmethod
def warn_deprecated():
warnings.warn(
"Features are deprecated and will be removed in a future "
"version. See http://bitbucket.org/pypa/setuptools/65.",
DeprecationWarning,
stacklevel=3,
)
def __init__(self, description, standard=False, available=True,
optional=True, require_features=(), remove=(), **extras):
self.warn_deprecated()
self.description = description
self.standard = standard
self.available = available
self.optional = optional
if isinstance(require_features,(str,Require)):
require_features = require_features,
self.require_features = [
r for r in require_features if isinstance(r,str)
]
er = [r for r in require_features if not isinstance(r,str)]
if er: extras['require_features'] = er
if isinstance(remove,str):
remove = remove,
self.remove = remove
self.extras = extras
if not remove and not require_features and not extras:
raise DistutilsSetupError(
"Feature %s: must define 'require_features', 'remove', or at least one"
" of 'packages', 'py_modules', etc."
)
def include_by_default(self):
"""Should this feature be included by default?"""
return self.available and self.standard
def include_in(self,dist):
"""Ensure feature and its requirements are included in distribution
You may override this in a subclass to perform additional operations on
the distribution. Note that this method may be called more than once
per feature, and so should be idempotent.
"""
if not self.available:
raise DistutilsPlatformError(
                self.description+" is required, "
"but is not available on this platform"
)
dist.include(**self.extras)
for f in self.require_features:
dist.include_feature(f)
def exclude_from(self,dist):
"""Ensure feature is excluded from distribution
You may override this in a subclass to perform additional operations on
the distribution. This method will be called at most once per
feature, and only after all included features have been asked to
include themselves.
"""
dist.exclude(**self.extras)
if self.remove:
for item in self.remove:
dist.exclude_package(item)
def validate(self,dist):
"""Verify that feature makes sense in context of distribution
This method is called by the distribution just before it parses its
command line. It checks to ensure that the 'remove' attribute, if any,
contains only valid package/module names that are present in the base
distribution when 'setup()' is called. You may override it in a
subclass to perform any other required validation of the feature
against a target distribution.
"""
for item in self.remove:
if not dist.has_contents_for(item):
raise DistutilsSetupError(
"%s wants to be able to remove %s, but the distribution"
" doesn't contain any packages or modules under %s"
% (self.description, item, item)
)
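
Note (not part of this commit): a minimal sketch of how the include()/exclude() hooks above behave on a bare Distribution; the package and module names here are hypothetical.

from setuptools.dist import Distribution

dist = Distribution({'packages': ['pkg', 'pkg.sub'], 'py_modules': ['standalone']})
dist.include(py_modules=['extra'])    # appended by _include_misc(); duplicates are skipped
dist.exclude_package('pkg')           # drops 'pkg' and every 'pkg.*' entry
assert dist.py_modules == ['standalone', 'extra']
assert dist.packages == []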


@@ -0,0 +1,53 @@
import sys
import re
import functools
import distutils.core
import distutils.extension
from setuptools.dist import _get_unpatched
_Extension = _get_unpatched(distutils.core.Extension)
def have_pyrex():
"""
Return True if Cython or Pyrex can be imported.
"""
pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext'
for pyrex_impl in pyrex_impls:
try:
# from (pyrex_impl) import build_ext
__import__(pyrex_impl, fromlist=['build_ext']).build_ext
return True
except Exception:
pass
return False
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
def __init__(self, *args, **kw):
_Extension.__init__(self, *args, **kw)
self._convert_pyx_sources_to_lang()
def _convert_pyx_sources_to_lang(self):
"""
Replace sources with .pyx extensions to sources with the target
language extension. This mechanism allows language authors to supply
pre-converted sources but to prefer the .pyx sources.
"""
if have_pyrex():
# the build has Cython, so allow it to compile the .pyx files
return
lang = self.language or ''
target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
sub = functools.partial(re.sub, '.pyx$', target_ext)
self.sources = list(map(sub, self.sources))
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
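
A short usage sketch (file names assumed): when neither Cython nor Pyrex can be imported, the Extension subclass above rewrites '.pyx' sources to the target-language extension; otherwise the '.pyx' sources are left for Cython to compile.

from setuptools.extension import Extension, have_pyrex

ext = Extension('fastmod', sources=['fastmod.pyx'], language='c++')
expected = ['fastmod.pyx'] if have_pyrex() else ['fastmod.cpp']
assert ext.sources == expected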

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,58 @@
"""
Customized Mixin2to3 support:
- adds support for converting doctests
This module raises an ImportError on Python 2.
"""
from distutils.util import Mixin2to3 as _Mixin2to3
from distutils import log
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
import setuptools
class DistutilsRefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class Mixin2to3(_Mixin2to3):
def run_2to3(self, files, doctests = False):
        # See if the distribution option has been set, otherwise check the
# setuptools default.
if self.distribution.use_2to3 is not True:
return
if not files:
return
log.info("Fixing "+" ".join(files))
self.__build_fixer_names()
self.__exclude_fixers()
if doctests:
if setuptools.run_2to3_on_doctests:
r = DistutilsRefactoringTool(self.fixer_names)
r.refactor(files, write=True, doctests_only=True)
else:
_Mixin2to3.run_2to3(self, files)
def __build_fixer_names(self):
if self.fixer_names: return
self.fixer_names = []
for p in setuptools.lib2to3_fixer_packages:
self.fixer_names.extend(get_fixers_from_package(p))
if self.distribution.use_2to3_fixers is not None:
for p in self.distribution.use_2to3_fixers:
self.fixer_names.extend(get_fixers_from_package(p))
def __exclude_fixers(self):
excluded_fixers = getattr(self, 'exclude_fixers', [])
if self.distribution.use_2to3_exclude_fixers is not None:
excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
for fixer_name in excluded_fixers:
if fixer_name in self.fixer_names:
self.fixer_names.remove(fixer_name)
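
For context, a hedged sketch of how a project's setup.py would opt into this 2to3 support; the project name and the extra fixer package are hypothetical.

from setuptools import setup

setup(
    name='example',                                   # hypothetical project
    py_modules=['example'],
    use_2to3=True,                                    # run lib2to3 at build time on Python 3
    use_2to3_fixers=['my_fixers'],                    # assumed extra fixer package
    use_2to3_exclude_fixers=['lib2to3.fixes.fix_print'],
)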

File diff suppressed because it is too large


@@ -0,0 +1,19 @@
"""
Compatibility Support for Python 2.6 and earlier
"""
import sys
from setuptools.compat import splittag
def strip_fragment(url):
"""
In `Python 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and
later was patched to disregard the fragment when making URL requests.
Do the same for Python 2.6 and earlier.
"""
url, fragment = splittag(url)
return url
if sys.version_info >= (2,7):
strip_fragment = lambda x: x
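
A quick illustration (the module path is assumed, since file names are not shown in this diff): on Python 2.6 the fragment is stripped before a request is made, while on 2.7 and later the identity lambda leaves the URL untouched.

from setuptools.py26compat import strip_fragment

print(strip_fragment('http://example.com/pkg.tar.gz#md5=abc123'))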


@@ -0,0 +1,15 @@
"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
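
A hedged usage sketch (module path and URL assumed): read every 'Set-Cookie' header from a response in a way that works on both Python 2 (getheaders) and Python 3 (get_all).

try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2
from setuptools.py27compat import get_all_headers

resp = urlopen('http://example.com/')
print(get_all_headers(resp.info(), 'Set-Cookie'))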


@@ -0,0 +1,37 @@
__all__ = ['get_config_vars', 'get_path']
try:
# Python 2.7 or >=3.2
from sysconfig import get_config_vars, get_path
except ImportError:
from distutils.sysconfig import get_config_vars, get_python_lib
def get_path(name):
if name not in ('platlib', 'purelib'):
raise ValueError("Name must be purelib or platlib")
return get_python_lib(name=='platlib')
try:
# Python >=3.2
from tempfile import TemporaryDirectory
except ImportError:
import shutil
import tempfile
class TemporaryDirectory(object):
""""
Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion.
"""
def __init__(self):
self.name = None # Handle mkdtemp raising an exception
self.name = tempfile.mkdtemp()
def __enter__(self):
return self.name
def __exit__(self, exctype, excvalue, exctrace):
try:
shutil.rmtree(self.name, True)
except OSError: #removal errors are not the only possible
pass
self.name = None
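
Usage sketch for the fallback TemporaryDirectory context manager above (on Python 3.2+ the stdlib version is used instead; the file name is arbitrary).

import os
from setuptools.py31compat import TemporaryDirectory

with TemporaryDirectory() as tmp:
    with open(os.path.join(tmp, 'scratch.txt'), 'w') as f:
        f.write('hello')
# the directory is removed on exit; removal errors are deliberately ignored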


@@ -0,0 +1,322 @@
import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import pkg_resources
if os.name == "java":
import org.python.modules.posix.PosixModule as _os
else:
_os = sys.modules[os.name]
try:
_file = file
except NameError:
_file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set
from setuptools.compat import builtins, execfile
__all__ = [
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def run_setup(setup_script, args):
"""Run a distutils setup script, sandboxed in its directory"""
old_dir = os.getcwd()
save_argv = sys.argv[:]
save_path = sys.path[:]
setup_dir = os.path.abspath(os.path.dirname(setup_script))
temp_dir = os.path.join(setup_dir,'temp')
if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
save_tmp = tempfile.tempdir
save_modules = sys.modules.copy()
pr_state = pkg_resources.__getstate__()
try:
tempfile.tempdir = temp_dir
os.chdir(setup_dir)
try:
sys.argv[:] = [setup_script]+list(args)
sys.path.insert(0, setup_dir)
# reset to include setup dir, w/clean callback list
working_set.__init__()
working_set.callbacks.append(lambda dist:dist.activate())
DirectorySandbox(setup_dir).run(
lambda: execfile(
"setup.py",
{'__file__':setup_script, '__name__':'__main__'}
)
)
except SystemExit:
v = sys.exc_info()[1]
if v.args and v.args[0]:
raise
# Normal exit, just return
finally:
pkg_resources.__setstate__(pr_state)
sys.modules.update(save_modules)
# remove any modules imported within the sandbox
del_modules = [
mod_name for mod_name in sys.modules
if mod_name not in save_modules
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
]
list(map(sys.modules.__delitem__, del_modules))
os.chdir(old_dir)
sys.path[:] = save_path
sys.argv[:] = save_argv
tempfile.tempdir = save_tmp
class AbstractSandbox:
"""Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
_active = False
def __init__(self):
self._attrs = [
name for name in dir(_os)
if not name.startswith('_') and hasattr(self,name)
]
def _copy(self, source):
for name in self._attrs:
setattr(os, name, getattr(source,name))
def run(self, func):
"""Run 'func' under os sandboxing"""
try:
self._copy(self)
if _file:
builtins.file = self._file
builtins.open = self._open
self._active = True
return func()
finally:
self._active = False
if _file:
builtins.file = _file
builtins.open = _open
self._copy(_os)
def _mk_dual_path_wrapper(name):
original = getattr(_os,name)
def wrap(self,src,dst,*args,**kw):
if self._active:
src,dst = self._remap_pair(name,src,dst,*args,**kw)
return original(src,dst,*args,**kw)
return wrap
for name in ["rename", "link", "symlink"]:
if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
def _mk_single_path_wrapper(name, original=None):
original = original or getattr(_os,name)
def wrap(self,path,*args,**kw):
if self._active:
path = self._remap_input(name,path,*args,**kw)
return original(path,*args,**kw)
return wrap
if _file:
_file = _mk_single_path_wrapper('file', _file)
_open = _mk_single_path_wrapper('open', _open)
for name in [
"stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
"remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
"startfile", "mkfifo", "mknod", "pathconf", "access"
]:
if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
def _mk_single_with_return(name):
original = getattr(_os,name)
def wrap(self,path,*args,**kw):
if self._active:
path = self._remap_input(name,path,*args,**kw)
return self._remap_output(name, original(path,*args,**kw))
return original(path,*args,**kw)
return wrap
for name in ['readlink', 'tempnam']:
if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
def _mk_query(name):
original = getattr(_os,name)
def wrap(self,*args,**kw):
retval = original(*args,**kw)
if self._active:
return self._remap_output(name, retval)
return retval
return wrap
for name in ['getcwd', 'tmpnam']:
if hasattr(_os,name): locals()[name] = _mk_query(name)
def _validate_path(self,path):
"""Called to remap or validate any path, whether input or output"""
return path
def _remap_input(self,operation,path,*args,**kw):
"""Called for path inputs"""
return self._validate_path(path)
def _remap_output(self,operation,path):
"""Called for path outputs"""
return self._validate_path(path)
def _remap_pair(self,operation,src,dst,*args,**kw):
"""Called for path pairs like rename, link, and symlink operations"""
return (
self._remap_input(operation+'-from',src,*args,**kw),
self._remap_input(operation+'-to',dst,*args,**kw)
)
if hasattr(os, 'devnull'):
_EXCEPTIONS = [os.devnull,]
else:
_EXCEPTIONS = []
try:
from win32com.client.gencache import GetGeneratePath
_EXCEPTIONS.append(GetGeneratePath())
del GetGeneratePath
except ImportError:
# it appears pywin32 is not installed, so no need to exclude.
pass
class DirectorySandbox(AbstractSandbox):
"""Restrict operations to a single subdirectory - pseudo-chroot"""
write_ops = dict.fromkeys([
"open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
"utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
])
_exception_patterns = [
# Allow lib2to3 to attempt to save a pickled grammar object (#121)
        r'.*lib2to3.*\.pickle$',
]
"exempt writing to paths that match the pattern"
def __init__(self, sandbox, exceptions=_EXCEPTIONS):
self._sandbox = os.path.normcase(os.path.realpath(sandbox))
self._prefix = os.path.join(self._sandbox,'')
self._exceptions = [
os.path.normcase(os.path.realpath(path))
for path in exceptions
]
AbstractSandbox.__init__(self)
def _violation(self, operation, *args, **kw):
raise SandboxViolation(operation, args, kw)
if _file:
def _file(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
self._violation("file", path, mode, *args, **kw)
return _file(path,mode,*args,**kw)
def _open(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
self._violation("open", path, mode, *args, **kw)
return _open(path,mode,*args,**kw)
def tmpnam(self):
self._violation("tmpnam")
def _ok(self, path):
active = self._active
try:
self._active = False
realpath = os.path.normcase(os.path.realpath(path))
return (
self._exempted(realpath)
or realpath == self._sandbox
or realpath.startswith(self._prefix)
)
finally:
self._active = active
def _exempted(self, filepath):
start_matches = (
filepath.startswith(exception)
for exception in self._exceptions
)
pattern_matches = (
re.match(pattern, filepath)
for pattern in self._exception_patterns
)
candidates = itertools.chain(start_matches, pattern_matches)
return any(candidates)
def _remap_input(self, operation, path, *args, **kw):
"""Called for path inputs"""
if operation in self.write_ops and not self._ok(path):
self._violation(operation, os.path.realpath(path), *args, **kw)
return path
def _remap_pair(self, operation, src, dst, *args, **kw):
"""Called for path pairs like rename, link, and symlink operations"""
if not self._ok(src) or not self._ok(dst):
self._violation(operation, src, dst, *args, **kw)
return (src,dst)
def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777
"""Called for low-level os.open()"""
if flags & WRITE_FLAGS and not self._ok(file):
self._violation("os.open", file, flags, mode, *args, **kw)
return _os.open(file,flags,mode, *args, **kw)
WRITE_FLAGS = functools.reduce(
operator.or_, [getattr(_os, a, 0) for a in
"O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
class SandboxViolation(DistutilsError):
"""A setup script attempted to modify the filesystem outside the sandbox"""
def __str__(self):
return """SandboxViolation: %s%r %s
The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.
This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args
#
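
A hedged sketch of running an untrusted setup script inside the sandbox above; the script path is hypothetical and the module path is assumed to be setuptools.sandbox.

from setuptools.sandbox import run_setup, SandboxViolation

try:
    # cwd, sys.path, sys.argv and tempfile.tempdir are confined to the
    # script's own directory (pseudo-chroot) while it runs
    run_setup('vendor/somepkg/setup.py', ['--version'])
except SandboxViolation as exc:
    print(exc)   # the script tried to write outside its own directory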


@@ -0,0 +1,11 @@
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
__requires__ = """%(spec)r"""
import sys
from pkg_resources import require
require("""%(spec)r""")
del require
__file__ = """%(dev_path)r"""
if sys.version_info < (3, 0):
execfile(__file__)
else:
exec(compile(open(__file__).read(), __file__, 'exec'))


@@ -0,0 +1,4 @@
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
__requires__ = """%(spec)r"""
import pkg_resources
pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")


@@ -0,0 +1,76 @@
def __boot():
import sys
import os
PYTHONPATH = os.environ.get('PYTHONPATH')
if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
PYTHONPATH = []
else:
PYTHONPATH = PYTHONPATH.split(os.pathsep)
pic = getattr(sys,'path_importer_cache',{})
stdpath = sys.path[len(PYTHONPATH):]
mydir = os.path.dirname(__file__)
#print "searching",stdpath,sys.path
for item in stdpath:
if item==mydir or not item:
continue # skip if current dir. on Windows, or my own directory
importer = pic.get(item)
if importer is not None:
loader = importer.find_module('site')
if loader is not None:
# This should actually reload the current module
loader.load_module('site')
break
else:
try:
import imp # Avoid import loop in Python >= 3.3
stream, path, descr = imp.find_module('site',[item])
except ImportError:
continue
if stream is None:
continue
try:
# This should actually reload the current module
imp.load_module('site',stream,path,descr)
finally:
stream.close()
break
else:
raise ImportError("Couldn't find the real 'site' module")
#print "loaded", __file__
known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
oldpos = getattr(sys,'__egginsert',0) # save old insertion position
sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH:
addsitedir(item)
sys.__egginsert += oldpos # restore effective old position
d, nd = makepath(stdpath[0])
insert_at = None
new_path = []
for item in sys.path:
p, np = makepath(item)
if np==nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
insert_at = len(new_path)
if np in known_paths or insert_at is None:
new_path.append(item)
else:
# new path after the insert point, back-insert it
new_path.insert(insert_at, item)
insert_at += 1
sys.path[:] = new_path
if __name__=='site':
__boot()
del __boot


@@ -0,0 +1,234 @@
import os
import socket
import atexit
import re
import pkg_resources
from pkg_resources import ResolutionError, ExtractionError
from setuptools.compat import urllib2
try:
import ssl
except ImportError:
ssl = None
__all__ = [
'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
'opener_for'
]
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
""".strip().split()
HTTPSHandler = HTTPSConnection = object
for what, where in (
('HTTPSHandler', ['urllib2','urllib.request']),
('HTTPSConnection', ['httplib', 'http.client']),
):
for module in where:
try:
exec("from %s import %s" % (module, what))
except ImportError:
pass
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
try:
from ssl import CertificateError, match_hostname
except ImportError:
try:
from backports.ssl_match_hostname import CertificateError
from backports.ssl_match_hostname import match_hostname
except ImportError:
CertificateError = None
match_hostname = None
if not CertificateError:
class CertificateError(ValueError):
pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
"""Simple verifying handler: no auth, subclasses, timeouts, etc."""
def __init__(self, ca_bundle):
self.ca_bundle = ca_bundle
HTTPSHandler.__init__(self)
def https_open(self, req):
return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
)
class VerifyingHTTPSConn(HTTPSConnection):
"""Simple verifying connection: no auth, subclasses, timeouts, etc."""
def __init__(self, host, ca_bundle, **kw):
HTTPSConnection.__init__(self, host, **kw)
self.ca_bundle = ca_bundle
def connect(self):
sock = socket.create_connection(
(self.host, self.port), getattr(self, 'source_address', None)
)
# Handle the socket if a (proxy) tunnel is present
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
self.sock = sock
self._tunnel()
self.sock = ssl.wrap_socket(
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
)
try:
match_hostname(self.sock.getpeercert(), self.host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
def opener_for(ca_bundle=None):
"""Get a urlopen() replacement that uses ca_bundle for verification"""
return urllib2.build_opener(
VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
).open
_wincerts = None
def get_win_certfile():
global _wincerts
if _wincerts is not None:
return _wincerts.name
try:
from wincertstore import CertFile
except ImportError:
return None
class MyCertFile(CertFile):
def __init__(self, stores=(), certs=()):
CertFile.__init__(self)
for store in stores:
self.addstore(store)
self.addcerts(certs)
atexit.register(self.close)
_wincerts = MyCertFile(stores=['CA', 'ROOT'])
return _wincerts.name
def find_ca_bundle():
"""Return an existing CA bundle path, or None"""
if os.name=='nt':
return get_win_certfile()
else:
for cert_path in cert_paths:
if os.path.isfile(cert_path):
return cert_path
try:
return pkg_resources.resource_filename('certifi', 'cacert.pem')
except (ImportError, ResolutionError, ExtractionError):
return None
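
A short sketch (module path and URL assumed) of the intended use of opener_for(): build a urlopen replacement that verifies server certificates against a discovered CA bundle.

from setuptools.ssl_support import is_available, find_ca_bundle, opener_for

if is_available and find_ca_bundle():
    urlopen = opener_for()               # verifying HTTPS opener
    page = urlopen('https://pypi.python.org/simple/').read()
else:
    print('ssl support or CA bundle missing; certificate verification not possible')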


@@ -0,0 +1,583 @@
import os
import re
import sys
from distutils import log
import xml.dom.pulldom
import shlex
import locale
import codecs
import unicodedata
import warnings
from setuptools.compat import unicode
from setuptools.py31compat import TemporaryDirectory
from xml.sax.saxutils import unescape
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
from subprocess import Popen as _Popen, PIPE as _PIPE
#NOTE: Use of the command line options requires SVN 1.3 or newer (December 2005)
# and SVN 1.3 hasn't been supported by the developers since mid 2008.
#subprocess is called several times with shell=(sys.platform=='win32')
#see the following for more information:
# http://bugs.python.org/issue8557
# http://stackoverflow.com/questions/5658622/
# python-subprocess-popen-environment-path
def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0):
#regarding the shell argument, see: http://bugs.python.org/issue8557
try:
proc = _Popen(args, stdout=stdout, stderr=stderr,
shell=(sys.platform == 'win32'))
data = proc.communicate()[stream]
except OSError:
return 1, ''
    #decode the captured output
    data = decode_as_string(data, encoding)
    #communicate() calls wait()
return proc.returncode, data
def _get_entry_schedule(entry):
schedule = entry.getElementsByTagName('schedule')[0]
return "".join([t.nodeValue
for t in schedule.childNodes
if t.nodeType == t.TEXT_NODE])
def _get_target_property(target):
property_text = target.getElementsByTagName('property')[0]
return "".join([t.nodeValue
for t in property_text.childNodes
if t.nodeType == t.TEXT_NODE])
def _get_xml_data(decoded_str):
if sys.version_info < (3, 0):
#old versions want an encoded string
data = decoded_str.encode('utf-8')
else:
data = decoded_str
return data
def joinpath(prefix, *suffix):
if not prefix or prefix == '.':
return os.path.join(*suffix)
return os.path.join(prefix, *suffix)
def determine_console_encoding():
try:
#try for the preferred encoding
encoding = locale.getpreferredencoding()
#see if the locale.getdefaultlocale returns null
        #some versions of python/platforms return US-ASCII
#when it cannot determine an encoding
if not encoding or encoding == "US-ASCII":
encoding = locale.getdefaultlocale()[1]
if encoding:
codecs.lookup(encoding) # make sure a lookup error is not made
except (locale.Error, LookupError):
encoding = None
is_osx = sys.platform == "darwin"
if not encoding:
return ["US-ASCII", "utf-8"][is_osx]
elif encoding.startswith("mac-") and is_osx:
        #certain versions of python on OSX would return mac-roman as the
        #default encoding, a leftover of earlier mac versions.
return "utf-8"
else:
return encoding
_console_encoding = determine_console_encoding()
def decode_as_string(text, encoding=None):
"""
Decode the console or file output explicitly using getpreferredencoding.
    The text parameter should be an encoded string; if it is not, no decode occurs.
If no encoding is given, getpreferredencoding is used. If encoding is
specified, that is used instead. This would be needed for SVN --xml
output. Unicode is explicitly put in composed NFC form.
    --xml should be UTF-8 (SVN Issue 2938); the discussion on the Subversion
DEV List from 2007 seems to indicate the same.
"""
#text should be a byte string
if encoding is None:
encoding = _console_encoding
if not isinstance(text, unicode):
text = text.decode(encoding)
text = unicodedata.normalize('NFC', text)
return text
def parse_dir_entries(decoded_str):
'''Parse the entries from a recursive info xml'''
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
entries = list()
for event, node in doc:
if event == 'START_ELEMENT' and node.nodeName == 'entry':
doc.expandNode(node)
if not _get_entry_schedule(node).startswith('delete'):
entries.append((node.getAttribute('path'),
node.getAttribute('kind')))
return entries[1:] # do not want the root directory
def parse_externals_xml(decoded_str, prefix=''):
'''Parse a propget svn:externals xml'''
prefix = os.path.normpath(prefix)
prefix = os.path.normcase(prefix)
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
externals = list()
for event, node in doc:
if event == 'START_ELEMENT' and node.nodeName == 'target':
doc.expandNode(node)
path = os.path.normpath(node.getAttribute('path'))
if os.path.normcase(path).startswith(prefix):
path = path[len(prefix)+1:]
data = _get_target_property(node)
#data should be decoded already
for external in parse_external_prop(data):
externals.append(joinpath(path, external))
return externals # do not want the root directory
def parse_external_prop(lines):
"""
Parse the value of a retrieved svn:externals entry.
    possible token setups (with quoting and backslash escaping in later versions)
URL[@#] EXT_FOLDERNAME
[-r#] URL EXT_FOLDERNAME
EXT_FOLDERNAME [-r#] URL
"""
externals = []
for line in lines.splitlines():
line = line.lstrip() # there might be a "\ "
if not line:
continue
if sys.version_info < (3, 0):
#shlex handles NULLs just fine and shlex in 2.7 tries to encode
            #as ascii automatically
line = line.encode('utf-8')
line = shlex.split(line)
if sys.version_info < (3, 0):
line = [x.decode('utf-8') for x in line]
#EXT_FOLDERNAME is either the first or last depending on where
#the URL falls
if urlparse.urlsplit(line[-1])[0]:
external = line[0]
else:
external = line[-1]
external = decode_as_string(external, encoding="utf-8")
externals.append(os.path.normpath(external))
return externals
def parse_prop_file(filename, key):
found = False
f = open(filename, 'rt')
data = ''
try:
for line in iter(f.readline, ''): # can't use direct iter!
parts = line.split()
if len(parts) == 2:
kind, length = parts
data = f.read(int(length))
if kind == 'K' and data == key:
found = True
elif kind == 'V' and found:
break
finally:
f.close()
return data
class SvnInfo(object):
'''
    Generic svn_info object. It has little knowledge of how to extract
    information. Use cls.load to instantiate the class matching the svn version.
Paths are not filesystem encoded.
'''
@staticmethod
def get_svn_version():
# Temp config directory should be enough to check for repository
# This is needed because .svn always creates .subversion and
# some operating systems do not handle dot directory correctly.
        # Real queries in real svn repos will be concerned with its creation
with TemporaryDirectory() as tempdir:
code, data = _run_command(['svn',
'--config-dir', tempdir,
'--version',
'--quiet'])
if code == 0 and data:
return data.strip()
else:
return ''
#svnversion return values (previous implementations return max revision)
# 4123:4168 mixed revision working copy
# 4168M modified working copy
# 4123S switched working copy
# 4123:4168MS mixed revision, modified, switched working copy
revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I)
@classmethod
def load(cls, dirname=''):
normdir = os.path.normpath(dirname)
# Temp config directory should be enough to check for repository
# This is needed because .svn always creates .subversion and
# some operating systems do not handle dot directory correctly.
        # Real queries in real svn repos will be concerned with its creation
with TemporaryDirectory() as tempdir:
code, data = _run_command(['svn',
'--config-dir', tempdir,
'info', normdir])
# Must check for some contents, as some use empty directories
        # in testcases; however, only the entries file is needed, and the
        # info command above MUST have worked
svn_dir = os.path.join(normdir, '.svn')
is_svn_wd = (not code or
os.path.isfile(os.path.join(svn_dir, 'entries')))
svn_version = tuple(cls.get_svn_version().split('.'))
try:
base_svn_version = tuple(int(x) for x in svn_version[:2])
except ValueError:
base_svn_version = tuple()
if not is_svn_wd:
#return an instance of this NO-OP class
return SvnInfo(dirname)
if code or not base_svn_version or base_svn_version < (1, 3):
warnings.warn(("No SVN 1.3+ command found: falling back "
"on pre 1.7 .svn parsing"), DeprecationWarning)
return SvnFileInfo(dirname)
if base_svn_version < (1, 5):
return Svn13Info(dirname)
return Svn15Info(dirname)
def __init__(self, path=''):
self.path = path
self._entries = None
self._externals = None
def get_revision(self):
        'Retrieve the directory revision information using svnversion'
code, data = _run_command(['svnversion', '-c', self.path])
if code:
log.warn("svnversion failed")
return 0
parsed = self.revision_re.match(data)
if parsed:
return int(parsed.group(2))
else:
return 0
@property
def entries(self):
if self._entries is None:
self._entries = self.get_entries()
return self._entries
@property
def externals(self):
if self._externals is None:
self._externals = self.get_externals()
return self._externals
def iter_externals(self):
'''
Iterate over the svn:external references in the repository path.
'''
for item in self.externals:
yield item
def iter_files(self):
'''
Iterate over the non-deleted file entries in the repository path
'''
for item, kind in self.entries:
if kind.lower() == 'file':
yield item
def iter_dirs(self, include_root=True):
'''
Iterate over the non-deleted file entries in the repository path
'''
if include_root:
yield self.path
for item, kind in self.entries:
if kind.lower() == 'dir':
yield item
def get_entries(self):
return []
def get_externals(self):
return []
class Svn13Info(SvnInfo):
def get_entries(self):
code, data = _run_command(['svn', 'info', '-R', '--xml', self.path],
encoding="utf-8")
if code:
log.debug("svn info failed")
return []
return parse_dir_entries(data)
def get_externals(self):
        #Prior to 1.5, --xml was not supported for svn propget and the -R
#output format breaks the shlex compatible semantics.
cmd = ['svn', 'propget', 'svn:externals']
result = []
for folder in self.iter_dirs():
code, lines = _run_command(cmd + [folder], encoding="utf-8")
if code != 0:
log.warn("svn propget failed")
return []
            #lines should be a str
for external in parse_external_prop(lines):
if folder:
external = os.path.join(folder, external)
result.append(os.path.normpath(external))
return result
class Svn15Info(Svn13Info):
def get_externals(self):
cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml']
code, lines = _run_command(cmd, encoding="utf-8")
if code:
log.debug("svn propget failed")
return []
return parse_externals_xml(lines, prefix=os.path.abspath(self.path))
class SvnFileInfo(SvnInfo):
def __init__(self, path=''):
super(SvnFileInfo, self).__init__(path)
self._directories = None
self._revision = None
def _walk_svn(self, base):
entry_file = joinpath(base, '.svn', 'entries')
if os.path.isfile(entry_file):
entries = SVNEntriesFile.load(base)
yield (base, False, entries.parse_revision())
for path in entries.get_undeleted_records():
path = decode_as_string(path)
path = joinpath(base, path)
if os.path.isfile(path):
yield (path, True, None)
elif os.path.isdir(path):
for item in self._walk_svn(path):
yield item
def _build_entries(self):
entries = list()
rev = 0
for path, isfile, dir_rev in self._walk_svn(self.path):
if isfile:
entries.append((path, 'file'))
else:
entries.append((path, 'dir'))
rev = max(rev, dir_rev)
self._entries = entries
self._revision = rev
def get_entries(self):
if self._entries is None:
self._build_entries()
return self._entries
def get_revision(self):
if self._revision is None:
self._build_entries()
return self._revision
def get_externals(self):
prop_files = [['.svn', 'dir-prop-base'],
['.svn', 'dir-props']]
externals = []
for dirname in self.iter_dirs():
prop_file = None
for rel_parts in prop_files:
filename = joinpath(dirname, *rel_parts)
if os.path.isfile(filename):
prop_file = filename
if prop_file is not None:
ext_prop = parse_prop_file(prop_file, 'svn:externals')
#ext_prop should be utf-8 coming from svn:externals
ext_prop = decode_as_string(ext_prop, encoding="utf-8")
externals.extend(parse_external_prop(ext_prop))
return externals
def svn_finder(dirname=''):
#combined externals due to common interface
#combined externals and entries due to lack of dir_props in 1.7
info = SvnInfo.load(dirname)
for path in info.iter_files():
yield path
for path in info.iter_externals():
sub_info = SvnInfo.load(path)
for sub_path in sub_info.iter_files():
yield sub_path
class SVNEntriesFile(object):
def __init__(self, data):
self.data = data
@classmethod
def load(class_, base):
filename = os.path.join(base, '.svn', 'entries')
f = open(filename)
try:
result = SVNEntriesFile.read(f)
finally:
f.close()
return result
@classmethod
def read(class_, fileobj):
data = fileobj.read()
is_xml = data.startswith('<?xml')
class_ = [SVNEntriesFileText, SVNEntriesFileXML][is_xml]
return class_(data)
def parse_revision(self):
all_revs = self.parse_revision_numbers() + [0]
return max(all_revs)
class SVNEntriesFileText(SVNEntriesFile):
known_svn_versions = {
'1.4.x': 8,
'1.5.x': 9,
'1.6.x': 10,
}
def __get_cached_sections(self):
return self.sections
def get_sections(self):
SECTION_DIVIDER = '\f\n'
sections = self.data.split(SECTION_DIVIDER)
sections = [x for x in map(str.splitlines, sections)]
try:
# remove the SVN version number from the first line
svn_version = int(sections[0].pop(0))
if not svn_version in self.known_svn_versions.values():
log.warn("Unknown subversion verson %d", svn_version)
except ValueError:
return
self.sections = sections
self.get_sections = self.__get_cached_sections
return self.sections
def is_valid(self):
return bool(self.get_sections())
def get_url(self):
return self.get_sections()[0][4]
def parse_revision_numbers(self):
revision_line_number = 9
rev_numbers = [
int(section[revision_line_number])
for section in self.get_sections()
if (len(section) > revision_line_number
and section[revision_line_number])
]
return rev_numbers
def get_undeleted_records(self):
undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete')
result = [
section[0]
for section in self.get_sections()
if undeleted(section)
]
return result
class SVNEntriesFileXML(SVNEntriesFile):
def is_valid(self):
return True
def get_url(self):
"Get repository URL"
urlre = re.compile('url="([^"]+)"')
return urlre.search(self.data).group(1)
def parse_revision_numbers(self):
revre = re.compile(r'committed-rev="(\d+)"')
return [
int(m.group(1))
for m in revre.finditer(self.data)
]
def get_undeleted_records(self):
entries_pattern = \
re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
results = [
unescape(match.group(1))
for match in entries_pattern.finditer(self.data)
]
return results
if __name__ == '__main__':
for name in svn_finder(sys.argv[1]):
print(name)
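
A small usage sketch (module path assumed): list the version-controlled files under a Subversion working copy, degrading to a no-op when 'svn' is not installed or the directory is not a working copy.

from setuptools.svn_utils import SvnInfo, svn_finder

info = SvnInfo.load('.')                 # Svn13Info/Svn15Info/SvnFileInfo, or a no-op SvnInfo
print('revision:', info.get_revision())
for path in svn_finder('.'):             # file entries plus svn:externals contents
    print(path)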


@@ -0,0 +1,352 @@
"""Tests for the 'setuptools' package"""
import sys
import os
import unittest
from setuptools.tests import doctest
import distutils.core
import distutils.cmd
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
from distutils.core import Extension
from distutils.version import LooseVersion
from setuptools.compat import func_code
import setuptools.dist
import setuptools.depends as dep
from setuptools import Feature
from setuptools.depends import Require
def additional_tests():
import doctest, unittest
suite = unittest.TestSuite((
doctest.DocFileSuite(
os.path.join('tests', 'api_tests.txt'),
optionflags=doctest.ELLIPSIS, package='pkg_resources',
),
))
if sys.platform == 'win32':
suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
return suite
def makeSetup(**args):
"""Return distribution from 'setup(**args)', without executing commands"""
distutils.core._setup_stop_after = "commandline"
# Don't let system command line leak into tests!
args.setdefault('script_args',['install'])
try:
return setuptools.setup(**args)
finally:
distutils.core._setup_stop_after = None
class DependsTests(unittest.TestCase):
def testExtractConst(self):
if not hasattr(dep, 'extract_constant'):
# skip on non-bytecode platforms
return
def f1():
global x, y, z
x = "test"
y = z
fc = func_code(f1)
# unrecognized name
self.assertEqual(dep.extract_constant(fc,'q', -1), None)
# constant assigned
self.assertEqual(dep.extract_constant(fc,'x', -1), "test")
# expression assigned
self.assertEqual(dep.extract_constant(fc,'y', -1), -1)
# recognized name, not assigned
self.assertEqual(dep.extract_constant(fc,'z', -1), None)
def testFindModule(self):
self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
f,p,i = dep.find_module('setuptools.tests')
f.close()
def testModuleExtract(self):
if not hasattr(dep, 'get_module_constant'):
# skip on non-bytecode platforms
return
from email import __version__
self.assertEqual(
dep.get_module_constant('email','__version__'), __version__
)
self.assertEqual(
dep.get_module_constant('sys','version'), sys.version
)
self.assertEqual(
dep.get_module_constant('setuptools.tests','__doc__'),__doc__
)
def testRequire(self):
if not hasattr(dep, 'extract_constant'):
            # skip on non-bytecode platforms
return
req = Require('Email','1.0.3','email')
self.assertEqual(req.name, 'Email')
self.assertEqual(req.module, 'email')
self.assertEqual(req.requested_version, '1.0.3')
self.assertEqual(req.attribute, '__version__')
self.assertEqual(req.full_name(), 'Email-1.0.3')
from email import __version__
self.assertEqual(req.get_version(), __version__)
self.assertTrue(req.version_ok('1.0.9'))
self.assertTrue(not req.version_ok('0.9.1'))
self.assertTrue(not req.version_ok('unknown'))
self.assertTrue(req.is_present())
self.assertTrue(req.is_current())
req = Require('Email 3000','03000','email',format=LooseVersion)
self.assertTrue(req.is_present())
self.assertTrue(not req.is_current())
self.assertTrue(not req.version_ok('unknown'))
req = Require('Do-what-I-mean','1.0','d-w-i-m')
self.assertTrue(not req.is_present())
self.assertTrue(not req.is_current())
req = Require('Tests', None, 'tests', homepage="http://example.com")
self.assertEqual(req.format, None)
self.assertEqual(req.attribute, None)
self.assertEqual(req.requested_version, None)
self.assertEqual(req.full_name(), 'Tests')
self.assertEqual(req.homepage, 'http://example.com')
paths = [os.path.dirname(p) for p in __path__]
self.assertTrue(req.is_present(paths))
self.assertTrue(req.is_current(paths))
class DistroTests(unittest.TestCase):
def setUp(self):
self.e1 = Extension('bar.ext',['bar.c'])
self.e2 = Extension('c.y', ['y.c'])
self.dist = makeSetup(
packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
py_modules=['b.d','x'],
ext_modules = (self.e1, self.e2),
package_dir = {},
)
def testDistroType(self):
self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
def testExcludePackage(self):
self.dist.exclude_package('a')
self.assertEqual(self.dist.packages, ['b','c'])
self.dist.exclude_package('b')
self.assertEqual(self.dist.packages, ['c'])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
self.dist.exclude_package('c')
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
# test removals from unspecified options
makeSetup().exclude_package('x')
def testIncludeExclude(self):
# remove an extension
self.dist.exclude(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2])
# add it back in
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
# should not add duplicate
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
def testExcludePackages(self):
self.dist.exclude(packages=['c','b','a'])
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
def testEmpty(self):
dist = makeSetup()
dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
dist = makeSetup()
dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
def testContents(self):
self.assertTrue(self.dist.has_contents_for('a'))
self.dist.exclude_package('a')
self.assertTrue(not self.dist.has_contents_for('a'))
self.assertTrue(self.dist.has_contents_for('b'))
self.dist.exclude_package('b')
self.assertTrue(not self.dist.has_contents_for('b'))
self.assertTrue(self.dist.has_contents_for('c'))
self.dist.exclude_package('c')
self.assertTrue(not self.dist.has_contents_for('c'))
def testInvalidIncludeExclude(self):
self.assertRaises(DistutilsSetupError,
self.dist.include, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.include, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.include, ext_modules={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, ext_modules={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.include, package_dir=['q']
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, package_dir=['q']
)
class FeatureTests(unittest.TestCase):
def setUp(self):
self.req = Require('Distutils','1.0.3','distutils')
self.dist = makeSetup(
features={
'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
'bar': Feature("bar", standard=True, packages=['pkg.bar'],
py_modules=['bar_et'], remove=['bar.ext'],
),
'baz': Feature(
"baz", optional=False, packages=['pkg.baz'],
scripts = ['scripts/baz_it'],
libraries=[('libfoo','foo/foofoo.c')]
),
'dwim': Feature("DWIM", available=False, remove='bazish'),
},
script_args=['--without-bar', 'install'],
packages = ['pkg.bar', 'pkg.foo'],
py_modules = ['bar_et', 'bazish'],
ext_modules = [Extension('bar.ext',['bar.c'])]
)
def testDefaults(self):
self.assertTrue(not
Feature(
"test",standard=True,remove='x',available=False
).include_by_default()
)
self.assertTrue(
Feature("test",standard=True,remove='x').include_by_default()
)
# Feature must have either kwargs, removes, or require_features
self.assertRaises(DistutilsSetupError, Feature, "test")
def testAvailability(self):
self.assertRaises(
DistutilsPlatformError,
self.dist.features['dwim'].include_in, self.dist
)
def testFeatureOptions(self):
dist = self.dist
self.assertTrue(
('with-dwim',None,'include DWIM') in dist.feature_options
)
self.assertTrue(
('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
)
self.assertTrue(
('with-bar',None,'include bar (default)') in dist.feature_options
)
self.assertTrue(
('without-bar',None,'exclude bar') in dist.feature_options
)
self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
self.assertTrue(not 'without-baz' in dist.feature_negopt)
def testUseFeatures(self):
dist = self.dist
self.assertEqual(dist.with_foo,1)
self.assertEqual(dist.with_bar,0)
self.assertEqual(dist.with_baz,1)
self.assertTrue(not 'bar_et' in dist.py_modules)
self.assertTrue(not 'pkg.bar' in dist.packages)
self.assertTrue('pkg.baz' in dist.packages)
self.assertTrue('scripts/baz_it' in dist.scripts)
self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
self.assertEqual(dist.ext_modules,[])
self.assertEqual(dist.require_features, [self.req])
# If we ask for bar, it should fail because we explicitly disabled
# it on the command line
self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
def testFeatureWithInvalidRemove(self):
self.assertRaises(
SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
)
class TestCommandTests(unittest.TestCase):
def testTestIsCommand(self):
test_cmd = makeSetup().get_command_obj('test')
self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
def testLongOptSuiteWNoDefault(self):
ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
ts1 = ts1.get_command_obj('test')
ts1.ensure_finalized()
self.assertEqual(ts1.test_suite, 'foo.tests.suite')
def testDefaultSuite(self):
ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
ts2.ensure_finalized()
self.assertEqual(ts2.test_suite, 'bar.tests.suite')
def testDefaultWModuleOnCmdLine(self):
ts3 = makeSetup(
test_suite='bar.tests',
script_args=['test','-m','foo.tests']
).get_command_obj('test')
ts3.ensure_finalized()
self.assertEqual(ts3.test_module, 'foo.tests')
self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
def testConflictingOptions(self):
ts4 = makeSetup(
script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
).get_command_obj('test')
self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
def testNoSuite(self):
ts5 = makeSetup().get_command_obj('test')
ts5.ensure_finalized()
self.assertEqual(ts5.test_suite, None)

File diff suppressed because it is too large

View File

@@ -0,0 +1,165 @@
import os
import zipfile
import sys
import tempfile
import unittest
import shutil
import stat
import unicodedata
from subprocess import Popen as _Popen, PIPE as _PIPE
def _extract(self, member, path=None, pwd=None):
"""for zipfile py2.5 borrowed from cpython"""
if not isinstance(member, zipfile.ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return _extract_member(self, member, path, pwd)
def _extract_from_zip(self, name, dest_path):
dest_file = open(dest_path, 'wb')
try:
dest_file.write(self.read(name))
finally:
dest_file.close()
def _extract_member(self, member, targetpath, pwd):
"""for zipfile py2.5 borrowed from cpython"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
# Strip trailing path separator, unless it represents the root.
if (targetpath[-1:] in (os.path.sep, os.path.altsep)
and len(os.path.splitdrive(targetpath)[1]) > 1):
targetpath = targetpath[:-1]
# don't include leading "/" from file name if present
if member.filename[0] == '/':
targetpath = os.path.join(targetpath, member.filename[1:])
else:
targetpath = os.path.join(targetpath, member.filename)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
_extract_from_zip(self, member.filename, targetpath)
return targetpath
def _remove_dir(target):
#on Windows this seems to be a problem
for dir_path, dirs, files in os.walk(target):
os.chmod(dir_path, stat.S_IWRITE)
for filename in files:
os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE)
shutil.rmtree(target)
class ZippedEnvironment(unittest.TestCase):
datafile = None
dataname = None
old_cwd = None
def setUp(self):
if self.datafile is None or self.dataname is None:
return
if not os.path.isfile(self.datafile):
self.old_cwd = None
return
self.old_cwd = os.getcwd()
self.temp_dir = tempfile.mkdtemp()
zip_file, source, target = [None, None, None]
try:
zip_file = zipfile.ZipFile(self.datafile)
for files in zip_file.namelist():
_extract(zip_file, files, self.temp_dir)
finally:
if zip_file:
zip_file.close()
del zip_file
os.chdir(os.path.join(self.temp_dir, self.dataname))
def tearDown(self):
#Assume setUp was never completed
if self.dataname is None or self.datafile is None:
return
try:
if self.old_cwd:
os.chdir(self.old_cwd)
_remove_dir(self.temp_dir)
except OSError:
#sigh?
pass
def _which_dirs(cmd):
result = set()
for path in os.environ.get('PATH', '').split(os.pathsep):
filename = os.path.join(path, cmd)
if os.access(filename, os.X_OK):
result.add(path)
return result
def run_setup_py(cmd, pypath=None, path=None,
data_stream=0, env=None):
"""
Execution command for tests, separate from those used by the
code directly to prevent accidental behavior issues
"""
if env is None:
env = dict()
for envname in os.environ:
env[envname] = os.environ[envname]
#override the python path if needed
if pypath is not None:
env["PYTHONPATH"] = pypath
#override the execution path if needed
if path is not None:
env["PATH"] = path
if not env.get("PATH", ""):
env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
env["PATH"] = os.pathsep.join(env["PATH"])
cmd = [sys.executable, "setup.py"] + list(cmd)
#regarding the shell argument, see: http://bugs.python.org/issue8557
try:
proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE,
shell=(sys.platform == 'win32'), env=env)
data = proc.communicate()[data_stream]
except OSError:
return 1, ''
#decode the console string if needed
if hasattr(data, "decode"):
data = data.decode() # should use the preferred encoding
data = unicodedata.normalize('NFC', data)
#communicate calls wait()
return proc.returncode, data

View File

@@ -0,0 +1,14 @@
import unittest
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
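# fallback decorator: when the condition holds, replace the test with a
# no-op; otherwise return the test function unchanged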
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper

View File

@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
result = 'passed'

View File

@@ -0,0 +1,82 @@
"""Basic http server for tests to simulate PyPI or custom indexes
"""
import sys
import time
import threading
from setuptools.compat import BaseHTTPRequestHandler
from setuptools.compat import (urllib2, URLError, HTTPServer,
SimpleHTTPRequestHandler)
class IndexServer(HTTPServer):
"""Basic single-threaded http server simulating a package index
You can use this server in unittest like this::
s = IndexServer()
s.start()
index_url = s.base_url() + 'mytestindex'
# do some test requests to the index
# The index files should be located in setuptools/tests/indexes
s.stop()
"""
def __init__(self, server_address=('', 0),
RequestHandlerClass=SimpleHTTPRequestHandler):
HTTPServer.__init__(self, server_address, RequestHandlerClass)
self._run = True
def serve(self):
while self._run:
self.handle_request()
def start(self):
self.thread = threading.Thread(target=self.serve)
self.thread.start()
def stop(self):
"Stop the server"
# Let the server finish the last request and wait for a new one.
time.sleep(0.1)
# self.shutdown is not supported on python < 2.6, so just
# set _run to false, and make a request, causing it to
# terminate.
self._run = False
url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
try:
if sys.version_info >= (2, 6):
urllib2.urlopen(url, timeout=5)
else:
urllib2.urlopen(url)
except URLError:
# ignore any errors; all that's important is the request
pass
self.thread.join()
self.socket.close()
def base_url(self):
port = self.server_port
return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
class RequestRecorder(BaseHTTPRequestHandler):
def do_GET(self):
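# record each handler instance on the server so tests can inspect the requested paths afterwards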
requests = vars(self.server).setdefault('requests', [])
requests.append(self)
self.send_response(200, 'OK')
class MockServer(HTTPServer, threading.Thread):
"""
A simple HTTP Server that records the requests made to it.
"""
def __init__(self, server_address=('', 0),
RequestHandlerClass=RequestRecorder):
HTTPServer.__init__(self, server_address, RequestHandlerClass)
threading.Thread.__init__(self)
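# run as a daemon thread so a forgotten server cannot keep the test process alive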
self.setDaemon(True)
self.requests = []
def run(self):
self.serve_forever()
def url(self):
return 'http://localhost:%(server_port)s/' % vars(self)
url = property(url)
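# Typical usage (sketch): start a MockServer, point a client at server.url,
# then inspect server.requests to see which paths were fetched; the
# setup_requires tests for easy_install use it this way.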

View File

@@ -0,0 +1,72 @@
"""develop tests
"""
import os
import re
import shutil
import site
import sys
import tempfile
import unittest
from distutils.errors import DistutilsError
from setuptools.compat import StringIO
from setuptools.command.bdist_egg import bdist_egg
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo', py_modules=['hi'])
"""
class TestDevelopTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
f = open('setup.py', 'w')
f.write(SETUP_PY)
f.close()
f = open('hi.py', 'w')
f.write('1\n')
f.close()
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_bdist_egg(self):
dist = Distribution(dict(
script_name='setup.py',
script_args=['bdist_egg'],
name='foo',
py_modules=['hi']
))
os.makedirs(os.path.join('build', 'src'))
old_stdout = sys.stdout
sys.stdout = o = StringIO()
try:
dist.parse_command_line()
dist.run_commands()
finally:
sys.stdout = old_stdout
# let's see if we got our egg link at the right place
[content] = os.listdir('dist')
self.assertTrue(re.match(r'foo-0.0.0-py[23].\d.egg$', content))
def test_suite():
return unittest.makeSuite(TestDevelopTest)

View File

@@ -0,0 +1,19 @@
"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result as distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted

View File

@@ -0,0 +1,124 @@
"""develop tests
"""
import os
import shutil
import site
import sys
import tempfile
import unittest
from distutils.errors import DistutilsError
from setuptools.command.develop import develop
from setuptools.command import easy_install as easy_install_pkg
from setuptools.compat import StringIO
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['foo'],
use_2to3=True,
)
"""
INIT_PY = """print "foo"
"""
class TestDevelopTest(unittest.TestCase):
def setUp(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
# Directory structure
self.dir = tempfile.mkdtemp()
os.mkdir(os.path.join(self.dir, 'foo'))
# setup.py
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
# foo/__init__.py
init = os.path.join(self.dir, 'foo', '__init__.py')
f = open(init, 'w')
f.write(INIT_PY)
f.close()
os.chdir(self.dir)
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
return
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_develop(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
dist = Distribution(
dict(name='foo',
packages=['foo'],
use_2to3=True,
version='0.0',
))
dist.script_name = 'setup.py'
cmd = develop(dist)
cmd.user = 1
cmd.ensure_finalized()
cmd.install_dir = site.USER_SITE
cmd.user = 1
old_stdout = sys.stdout
#sys.stdout = StringIO()
try:
cmd.run()
finally:
sys.stdout = old_stdout
# let's see if we got our egg link at the right place
content = os.listdir(site.USER_SITE)
content.sort()
self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
# Check that we are using the right code.
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
try:
path = egg_link_file.read().split()[0].strip()
finally:
egg_link_file.close()
init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
try:
init = init_file.read().strip()
finally:
init_file.close()
if sys.version < "3":
self.assertEqual(init, 'print "foo"')
else:
self.assertEqual(init, 'print("foo")')
def notest_develop_with_setup_requires(self):
wanted = ("Could not find suitable distribution for "
"Requirement.parse('I-DONT-EXIST')")
old_dir = os.getcwd()
os.chdir(self.dir)
try:
try:
dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
except DistutilsError:
e = sys.exc_info()[1]
error = str(e)
if error == wanted:
pass
finally:
os.chdir(old_dir)

View File

@@ -0,0 +1,83 @@
"""Test .dist-info style distributions.
"""
import os
import shutil
import tempfile
import unittest
import textwrap
try:
import ast
except ImportError:
pass
import pkg_resources
from setuptools.tests.py26compat import skipIf
def DALS(s):
"dedent and left-strip"
return textwrap.dedent(s).lstrip()
class TestDistInfo(unittest.TestCase):
def test_distinfo(self):
dists = {}
for d in pkg_resources.find_distributions(self.tmpdir):
dists[d.project_name] = d
assert len(dists) == 2, dists
unversioned = dists['UnversionedDistribution']
versioned = dists['VersionedDistribution']
assert versioned.version == '2.718' # from filename
assert unversioned.version == '0.3' # from METADATA
@skipIf('ast' not in globals(),
"ast is used to test conditional dependencies (Python >= 2.6)")
def test_conditional_dependencies(self):
requires = [pkg_resources.Requirement.parse('splort==4'),
pkg_resources.Requirement.parse('quux>=1.1')]
for d in pkg_resources.find_distributions(self.tmpdir):
self.assertEqual(d.requires(), requires[:1])
self.assertEqual(d.requires(extras=('baz',)), requires)
self.assertEqual(d.extras, ['baz'])
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
versioned = os.path.join(self.tmpdir,
'VersionedDistribution-2.718.dist-info')
os.mkdir(versioned)
metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
try:
metadata_file.write(DALS(
"""
Metadata-Version: 1.2
Name: VersionedDistribution
Requires-Dist: splort (4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
unversioned = os.path.join(self.tmpdir,
'UnversionedDistribution.dist-info')
os.mkdir(unversioned)
metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
try:
metadata_file.write(DALS(
"""
Metadata-Version: 1.2
Name: UnversionedDistribution
Version: 0.3
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
def tearDown(self):
shutil.rmtree(self.tmpdir)

View File

@@ -0,0 +1,457 @@
"""Easy install Tests
"""
import sys
import os
import shutil
import tempfile
import unittest
import site
import contextlib
import textwrap
import tarfile
import logging
import distutils.core
from setuptools.compat import StringIO, BytesIO, next, urlparse
from setuptools.sandbox import run_setup, SandboxViolation
from setuptools.command.easy_install import (
easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
from setuptools.command.easy_install import PthDistributions
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
from pkg_resources import working_set, VersionConflict
from pkg_resources import Distribution as PRDistribution
import setuptools.tests.server
import pkg_resources
class FakeDist(object):
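# minimal stand-in for a pkg_resources Distribution: just enough for
# get_script_args to build a single console script named 'name'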
def get_entry_map(self, group):
if group != 'console_scripts':
return {}
return {'name': 'ep'}
def as_requirement(self):
return 'spec'
WANTED = """\
#!%s
# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
__requires__ = 'spec'
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point('spec', 'console_scripts', 'name')()
)
""" % nt_quote_arg(fix_jython_executable(sys.executable, ""))
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestEasyInstallTest(unittest.TestCase):
def test_install_site_py(self):
dist = Distribution()
cmd = easy_install(dist)
cmd.sitepy_installed = False
cmd.install_dir = tempfile.mkdtemp()
try:
cmd.install_site_py()
sitepy = os.path.join(cmd.install_dir, 'site.py')
self.assertTrue(os.path.exists(sitepy))
finally:
shutil.rmtree(cmd.install_dir)
def test_get_script_args(self):
dist = FakeDist()
old_platform = sys.platform
try:
name, script = [i for i in next(get_script_args(dist))][0:2]
finally:
sys.platform = old_platform
self.assertEqual(script, WANTED)
def test_no_find_links(self):
# new option '--no-find-links', which blocks find-links added at
# the project level
dist = Distribution()
cmd = easy_install(dist)
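# stub out the .pth processing check so finalization does not write a real test .pth file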
cmd.check_pth_processing = lambda: True
cmd.no_find_links = True
cmd.find_links = ['link1', 'link2']
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
cmd.args = ['ok']
cmd.ensure_finalized()
self.assertEqual(cmd.package_index.scanned_urls, {})
# let's try without it (default behavior)
cmd = easy_install(dist)
cmd.check_pth_processing = lambda: True
cmd.find_links = ['link1', 'link2']
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
cmd.args = ['ok']
cmd.ensure_finalized()
keys = sorted(cmd.package_index.scanned_urls.keys())
self.assertEqual(keys, ['link1', 'link2'])
class TestPTHFileWriter(unittest.TestCase):
def test_add_from_cwd_site_sets_dirty(self):
'''a pth file manager should set dirty
if a distribution is in site but also the cwd
'''
pth = PthDistributions('does-not_exist', [os.getcwd()])
self.assertTrue(not pth.dirty)
pth.add(PRDistribution(os.getcwd()))
self.assertTrue(pth.dirty)
def test_add_from_site_is_ignored(self):
if os.name != 'nt':
location = '/test/location/does-not-have-to-exist'
else:
location = 'c:\\does_not_exist'
pth = PthDistributions('does-not_exist', [location, ])
self.assertTrue(not pth.dirty)
pth.add(PRDistribution(location))
self.assertTrue(not pth.dirty)
class TestUserInstallTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.old_enable_site = site.ENABLE_USER_SITE
self.old_file = easy_install_pkg.__file__
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
easy_install_pkg.__file__ = site.USER_SITE
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
site.ENABLE_USER_SITE = self.old_enable_site
easy_install_pkg.__file__ = self.old_file
def test_user_install_implied(self):
site.ENABLE_USER_SITE = True # disabled sometimes
#XXX: replace with something meaningful
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd.args = ['py']
cmd.ensure_finalized()
self.assertTrue(cmd.user, 'user should be implied')
def test_multiproc_atexit(self):
try:
__import__('multiprocessing')
except ImportError:
# skip the test if multiprocessing is not available
return
log = logging.getLogger('test_easy_install')
logging.basicConfig(level=logging.INFO, stream=sys.stderr)
log.info('this should not break')
def test_user_install_not_implied_without_usersite_enabled(self):
site.ENABLE_USER_SITE = False # usually enabled
#XXX: replace with something meaningful
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd.args = ['py']
cmd.initialize_options()
self.assertFalse(cmd.user, 'user should not be implied')
def test_local_index(self):
# make sure the local index is used
# when easy_install looks for installed
# packages
new_location = tempfile.mkdtemp()
target = tempfile.mkdtemp()
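# drop a bare egg-info file into new_location so the local index scan below can discover 'foo'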
egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
f = open(egg_file, 'w')
try:
f.write('Name: foo\n')
finally:
f.close()
sys.path.append(target)
old_ppath = os.environ.get('PYTHONPATH')
os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path)
try:
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd.install_dir = target
cmd.args = ['foo']
cmd.ensure_finalized()
cmd.local_index.scan([new_location])
res = cmd.easy_install('foo')
actual = os.path.normcase(os.path.realpath(res.location))
expected = os.path.normcase(os.path.realpath(new_location))
self.assertEqual(actual, expected)
finally:
sys.path.remove(target)
for basedir in [new_location, target, ]:
if not os.path.exists(basedir) or not os.path.isdir(basedir):
continue
try:
shutil.rmtree(basedir)
except:
pass
if old_ppath is not None:
os.environ['PYTHONPATH'] = old_ppath
else:
del os.environ['PYTHONPATH']
def test_setup_requires(self):
"""Regression test for Distribute issue #318
Ensure that a package with setup_requires can be installed when
setuptools is installed in the user site-packages without causing a
SandboxViolation.
"""
test_pkg = create_setup_requires_package(self.dir)
test_setup_py = os.path.join(test_pkg, 'setup.py')
try:
with quiet_context():
with reset_setup_stop_context():
run_setup(test_setup_py, ['install'])
except SandboxViolation:
self.fail('Installation caused SandboxViolation')
class TestSetupRequires(unittest.TestCase):
def test_setup_requires_honors_fetch_params(self):
"""
When easy_install installs a source distribution which specifies
setup_requires, it should honor the fetch parameters (such as
allow-hosts, index-url, and find-links).
"""
# set up a server which will simulate an alternate package index.
p_index = setuptools.tests.server.MockServer()
p_index.start()
netloc = 1
p_index_loc = urlparse(p_index.url)[netloc]
if p_index_loc.endswith(':0'):
# Some platforms (Jython) don't find a port to which to bind,
# so skip this test for them.
return
with quiet_context():
# create an sdist that has a build-time dependency.
with TestSetupRequires.create_sdist() as dist_file:
with tempdir_context() as temp_install_dir:
with environment_context(PYTHONPATH=temp_install_dir):
ei_params = ['--index-url', p_index.url,
'--allow-hosts', p_index_loc,
'--exclude-scripts', '--install-dir', temp_install_dir,
dist_file]
with reset_setup_stop_context():
with argv_context(['easy_install']):
# attempt to install the dist. It should fail because
# it doesn't exist.
self.assertRaises(SystemExit,
easy_install_pkg.main, ei_params)
# there should have been two or three requests to the server
# (three happens on Python 3.3a)
self.assertTrue(2 <= len(p_index.requests) <= 3)
self.assertEqual(p_index.requests[0].path, '/does-not-exist/')
@staticmethod
@contextlib.contextmanager
def create_sdist():
"""
Return an sdist with a setup_requires dependency (of something that
doesn't exist)
"""
with tempdir_context() as dir:
dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
make_trivial_sdist(
dist_path,
textwrap.dedent("""
import setuptools
setuptools.setup(
name="setuptools-test-fetcher",
version="1.0",
setup_requires = ['does-not-exist'],
)
""").lstrip())
yield dist_path
def test_setup_requires_overrides_version_conflict(self):
"""
Regression test for issue #323.
Ensures that a distribution's setup_requires requirements can still be
installed and used locally even if a conflicting version of that
requirement is already on the path.
"""
pr_state = pkg_resources.__getstate__()
fake_dist = PRDistribution('does-not-matter', project_name='foobar',
version='0.0')
working_set.add(fake_dist)
try:
with tempdir_context() as temp_dir:
test_pkg = create_setup_requires_package(temp_dir)
test_setup_py = os.path.join(test_pkg, 'setup.py')
with quiet_context() as (stdout, stderr):
with reset_setup_stop_context():
try:
# Don't even need to install the package, just
# running the setup.py at all is sufficient
run_setup(test_setup_py, ['--name'])
except VersionConflict:
self.fail('Installing setup.py requirements '
'caused a VersionConflict')
lines = stdout.readlines()
self.assertTrue(len(lines) > 0)
self.assertEqual(lines[-1].strip(), 'test_pkg')
finally:
pkg_resources.__setstate__(pr_state)
def create_setup_requires_package(path):
"""Creates a source tree under path for a trivial test package that has a
single requirement in setup_requires--a tarball for that requirement is
also created and added to the dependency_links argument.
"""
test_setup_attrs = {
'name': 'test_pkg', 'version': '0.0',
'setup_requires': ['foobar==0.1'],
'dependency_links': [os.path.abspath(path)]
}
test_pkg = os.path.join(path, 'test_pkg')
test_setup_py = os.path.join(test_pkg, 'setup.py')
os.mkdir(test_pkg)
f = open(test_setup_py, 'w')
f.write(textwrap.dedent("""\
import setuptools
setuptools.setup(**%r)
""" % test_setup_attrs))
f.close()
foobar_path = os.path.join(path, 'foobar-0.1.tar.gz')
make_trivial_sdist(
foobar_path,
textwrap.dedent("""\
import setuptools
setuptools.setup(
name='foobar',
version='0.1'
)
"""))
return test_pkg
def make_trivial_sdist(dist_path, setup_py):
"""Create a simple sdist tarball at dist_path, containing just a
setup.py, the contents of which are provided by the setup_py string.
"""
setup_py_file = tarfile.TarInfo(name='setup.py')
try:
# Python 3 (StringIO gets converted to io module)
MemFile = BytesIO
except AttributeError:
MemFile = StringIO
setup_py_bytes = MemFile(setup_py.encode('utf-8'))
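# the tar header must record the exact payload size, otherwise addfile() would write a truncated or padded member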
setup_py_file.size = len(setup_py_bytes.getvalue())
dist = tarfile.open(dist_path, 'w:gz')
try:
dist.addfile(setup_py_file, fileobj=setup_py_bytes)
finally:
dist.close()
@contextlib.contextmanager
def tempdir_context(cd=lambda dir:None):
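# create a temp dir, optionally chdir into it via the 'cd' callback, and
# always restore the previous cwd and remove the dir on exit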
temp_dir = tempfile.mkdtemp()
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
@contextlib.contextmanager
def environment_context(**updates):
old_env = os.environ.copy()
os.environ.update(updates)
try:
yield
finally:
for key in updates:
del os.environ[key]
os.environ.update(old_env)
@contextlib.contextmanager
def argv_context(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
@contextlib.contextmanager
def reset_setup_stop_context():
"""
When the setuptools tests are run using setup.py test, and then
one wants to invoke another setup() command (such as easy_install)
within those tests, it's necessary to reset the global variable
in distutils.core so that the setup() command will run naturally.
"""
setup_stop_after = distutils.core._setup_stop_after
distutils.core._setup_stop_after = None
yield
distutils.core._setup_stop_after = setup_stop_after
@contextlib.contextmanager
def quiet_context():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = StringIO()
new_stderr = sys.stderr = StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr

View File

@@ -0,0 +1,173 @@
import os
import sys
import tempfile
import shutil
import unittest
import pkg_resources
import warnings
from setuptools.command import egg_info
from setuptools import svn_utils
from setuptools.tests import environment, test_svn
from setuptools.tests.py26compat import skipIf
ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10')
"An entries file generated with svn 1.6.17 against the legacy Setuptools repo"
class TestEggInfo(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
os.mkdir(os.path.join(self.test_dir, '.svn'))
self.old_cwd = os.getcwd()
os.chdir(self.test_dir)
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.test_dir)
def _write_entries(self, entries):
fn = os.path.join(self.test_dir, '.svn', 'entries')
entries_f = open(fn, 'wb')
entries_f.write(entries)
entries_f.close()
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_version_10_format(self):
"""
"""
#keeping this data set at 1.6 is a good check on get_svn_revision,
#ensuring svnversion returns what the legacy parser would have returned
version_str = svn_utils.SvnInfo.get_svn_version()
version = [int(x) for x in version_str.split('.')[:2]]
if version != [1, 6]:
if hasattr(self, 'skipTest'):
self.skipTest('')
else:
sys.stderr.write('\n Skipping due to SVN Version\n')
return
self._write_entries(ENTRIES_V10)
rev = egg_info.egg_info.get_svn_revision()
self.assertEqual(rev, '89000')
def test_version_10_format_legacy_parser(self):
"""
"""
path_variable = None
for env in os.environ:
if env.lower() == 'path':
path_variable = env
if path_variable:
old_path = os.environ[path_variable]
os.environ[path_variable] = ''
#catch_warnings not available until py26
warning_filters = warnings.filters
warnings.filters = warning_filters[:]
try:
warnings.simplefilter("ignore", DeprecationWarning)
self._write_entries(ENTRIES_V10)
rev = egg_info.egg_info.get_svn_revision()
finally:
#restore the warning filters
warnings.filters = warning_filters
#restore the os path
if path_variable:
os.environ[path_variable] = old_path
self.assertEqual(rev, '89000')
DUMMY_SOURCE_TXT = """CHANGES.txt
CONTRIBUTORS.txt
HISTORY.txt
LICENSE
MANIFEST.in
README.txt
setup.py
dummy/__init__.py
dummy/test.txt
dummy.egg-info/PKG-INFO
dummy.egg-info/SOURCES.txt
dummy.egg-info/dependency_links.txt
dummy.egg-info/top_level.txt"""
class TestSvnDummy(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return None
self.base_version = tuple([int(x) for x in version.split('.')][:2])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
# no bundled test data newer than 1.8; fall back to the 1.8 data set
self.base_version = (1, 8)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummy, self).setUp()
@skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
data_stream=1)
if code:
raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
infile = open(sources, 'r')
try:
read_contents = infile.read()
finally:
infile.close()
del infile
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
return data
class TestSvnDummyLegacy(environment.ZippedEnvironment):
def setUp(self):
self.base_version = (1, 6)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummyLegacy, self).setUp()
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
path="",
data_stream=1)
if code:
raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
infile = open(sources, 'r')
try:
read_contents = infile.read()
finally:
infile.close()
del infile
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
return data
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)

View File

@@ -0,0 +1,82 @@
"""Tests for setuptools.find_packages()."""
import os
import shutil
import tempfile
import unittest
from setuptools import find_packages
class TestFindPackages(unittest.TestCase):
def setUp(self):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def tearDown(self):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
def test_include_excludes_other(self):
"""
If include is specified, other packages should be excluded.
"""
self._touch('__init__.py', self.pkg_dir)
alt_dir = self._mkdir('other_pkg', self.dist_dir)
self._touch('__init__.py', alt_dir)
packages = find_packages(self.dist_dir, include=['other_pkg'])
self.assertEqual(packages, ['other_pkg'])
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
self.assertTrue('pkg.some.data' not in packages)

View File

@@ -0,0 +1,68 @@
import os
import unittest
from setuptools.tests.py26compat import skipIf
try:
import ast
except ImportError:
pass
class TestMarkerlib(unittest.TestCase):
@skipIf('ast' not in globals(),
"ast not available (Python < 2.6?)")
def test_markers(self):
from _markerlib import interpret, default_environment, compile
os_name = os.name
self.assertTrue(interpret(""))
self.assertTrue(interpret("os.name != 'buuuu'"))
self.assertTrue(interpret("os_name != 'buuuu'"))
self.assertTrue(interpret("python_version > '1.0'"))
self.assertTrue(interpret("python_version < '5.0'"))
self.assertTrue(interpret("python_version <= '5.0'"))
self.assertTrue(interpret("python_version >= '1.0'"))
self.assertTrue(interpret("'%s' in os.name" % os_name))
self.assertTrue(interpret("'%s' in os_name" % os_name))
self.assertTrue(interpret("'buuuu' not in os.name"))
self.assertFalse(interpret("os.name == 'buuuu'"))
self.assertFalse(interpret("os_name == 'buuuu'"))
self.assertFalse(interpret("python_version < '1.0'"))
self.assertFalse(interpret("python_version > '5.0'"))
self.assertFalse(interpret("python_version >= '5.0'"))
self.assertFalse(interpret("python_version <= '1.0'"))
self.assertFalse(interpret("'%s' not in os.name" % os_name))
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))
self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'"))
environment = default_environment()
environment['extra'] = 'test'
self.assertTrue(interpret("extra == 'test'", environment))
self.assertFalse(interpret("extra == 'doc'", environment))
def raises_nameError():
try:
interpret("python.version == '42'")
except NameError:
pass
else:
raise Exception("Expected NameError")
raises_nameError()
def raises_syntaxError():
try:
interpret("(x for x in (4,))")
except SyntaxError:
pass
else:
raise Exception("Expected SyntaxError")
raises_syntaxError()
statement = "python_version == '5'"
self.assertEqual(compile(statement).__doc__, statement)

View File

@@ -0,0 +1,203 @@
"""Package Index Tests
"""
import sys
import os
import unittest
import pkg_resources
from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url
import distutils.errors
import setuptools.package_index
from setuptools.tests.server import IndexServer
class TestPackageIndex(unittest.TestCase):
def test_bad_url_bad_port(self):
index = setuptools.package_index.PackageIndex()
url = 'http://127.0.0.1:0/nonesuch/test_package_index'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
else:
self.assertTrue(isinstance(v, HTTPError))
def test_bad_url_typo(self):
# issue 16
# easy_install inquant.contentmirror.plone breaks because of a typo
# in its home URL
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
else:
self.assertTrue(isinstance(v, HTTPError))
def test_bad_url_bad_status_line(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
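# monkeypatch the opener so any request appears to fail with a malformed status line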
def _urlopen(*args):
raise httplib.BadStatusLine('line')
index.opener = _urlopen
url = 'http://example.com'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue('line' in str(v))
else:
raise AssertionError('Should have raised here!')
def test_bad_url_double_scheme(self):
"""
A bad URL with a double scheme should raise a DistutilsError.
"""
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
# issue 20
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
try:
index.open_url(url)
except distutils.errors.DistutilsError:
error = sys.exc_info()[1]
msg = unicode(error)
assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
return
raise RuntimeError("Did not raise")
def test_bad_url_screwy_href(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
# issue #160
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
# this should not fail
url = 'http://example.com'
page = ('<a href="http://www.famfamfam.com]('
'http://www.famfamfam.com/">')
index.process_index(url, page)
def test_url_ok(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'file:///tmp/test_package_index'
self.assertTrue(index.url_ok(url, True))
def test_links_priority(self):
"""
Download links from the pypi simple index should be used before
external download links.
https://bitbucket.org/tarek/distribute/issue/163
Use case:
- someone uploads a package on pypi, a md5 is generated
- someone manually copies this link (with the md5 in the url) onto an
external page accessible from the package page.
- someone reuploads the package (with a different md5)
- while easy_installing, an MD5 error occurs because the external link
is used
-> Setuptools should use the link from pypi, not the external one.
"""
if sys.platform.startswith('java'):
# Skip this test on jython because binding to :0 fails
return
# start an index server
server = IndexServer()
server.start()
index_url = server.base_url() + 'test_links_priority/simple/'
# scan a test index
pi = setuptools.package_index.PackageIndex(index_url)
requirement = pkg_resources.Requirement.parse('foobar')
pi.find_packages(requirement)
server.stop()
# the distribution has been found
self.assertTrue('foobar' in pi)
# we have only one link, because links are compared without md5
self.assertTrue(len(pi['foobar'])==1)
# the link should be from the index
self.assertTrue('correct_md5' in pi['foobar'][0].location)
def test_parse_bdist_wininst(self):
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
def test__vcs_split_rev_from_url(self):
"""
Test the basic usage of _vcs_split_rev_from_url
"""
vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
url, rev = vsrfu('https://example.com/bar@2995')
self.assertEqual(url, 'https://example.com/bar')
self.assertEqual(rev, '2995')
def test_local_index(self):
"""
local_open should be able to read an index from the file system.
"""
f = open('index.html', 'w')
f.write('<div>content</div>')
f.close()
try:
url = 'file:' + pathname2url(os.getcwd()) + '/'
res = setuptools.package_index.local_open(url)
finally:
os.remove('index.html')
assert 'content' in res.read()
class TestContentCheckers(unittest.TestCase):
def test_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
checker.feed('You should probably not be using MD5'.encode('ascii'))
self.assertEqual(checker.hash.hexdigest(),
'f12895fdffbd45007040d2e44df98478')
self.assertTrue(checker.is_valid())
def test_other_fragment(self):
"Content checks should succeed silently if no hash is present"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#something%20completely%20different')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
def test_blank_md5(self):
"Content checks should succeed if a hash is empty"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
def test_get_hash_name_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
self.assertEqual(checker.hash_name, 'md5')
def test_report(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
rep = checker.report(lambda x: x, 'My message about %s')
self.assertEqual(rep, 'My message about md5')

View File

@@ -0,0 +1,620 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
import os
import sys
import tempfile
import shutil
from unittest import TestCase
import pkg_resources
from pkg_resources import (parse_requirements, VersionConflict, parse_version,
Distribution, EntryPoint, Requirement, safe_version, safe_name,
WorkingSet)
from setuptools.command.easy_install import (get_script_header, is_sh,
nt_quote_arg)
from setuptools.compat import StringIO, iteritems
try:
frozenset
except NameError:
from sets import ImmutableSet as frozenset
def safe_repr(obj, short=False):
""" copied from Python2.7"""
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < pkg_resources._MAX_LENGTH:
return result
return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...'
class Metadata(pkg_resources.EmptyProvider):
"""Mock object to return metadata as if from an on-disk distribution"""
def __init__(self,*pairs):
self.metadata = dict(pairs)
def has_metadata(self,name):
return name in self.metadata
def get_metadata(self,name):
return self.metadata[name]
def get_metadata_lines(self,name):
return pkg_resources.yield_lines(self.get_metadata(name))
dist_from_fn = pkg_resources.Distribution.from_filename
class DistroTests(TestCase):
def testCollection(self):
# empty path should produce no distributions
ad = pkg_resources.Environment([], platform=None, python=None)
self.assertEqual(list(ad), [])
self.assertEqual(ad['FooPkg'],[])
ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
# Name is in there now
self.assertTrue(ad['FooPkg'])
# But only 1 package
self.assertEqual(list(ad), ['foopkg'])
# Distributions sort by version
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
)
# Removing a distribution leaves sequence alone
ad.remove(ad['FooPkg'][1])
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.4','1.2']
)
# And inserting adds them in order
ad.add(dist_from_fn("FooPkg-1.9.egg"))
self.assertEqual(
[dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
)
ws = WorkingSet([])
foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
req, = parse_requirements("FooPkg>=1.3")
# Nominal case: no distros on path, should yield all applicable
self.assertEqual(ad.best_match(req,ws).version, '1.9')
# If a matching distro is already installed, should return only that
ws.add(foo14)
self.assertEqual(ad.best_match(req,ws).version, '1.4')
# If the first matching distro is unsuitable, it's a version conflict
ws = WorkingSet([])
ws.add(foo12)
ws.add(foo14)
self.assertRaises(VersionConflict, ad.best_match, req, ws)
# If more than one match on the path, the first one takes precedence
ws = WorkingSet([])
ws.add(foo14)
ws.add(foo12)
ws.add(foo14)
self.assertEqual(ad.best_match(req,ws).version, '1.4')
def checkFooPkg(self,d):
self.assertEqual(d.project_name, "FooPkg")
self.assertEqual(d.key, "foopkg")
self.assertEqual(d.version, "1.3-1")
self.assertEqual(d.py_version, "2.4")
self.assertEqual(d.platform, "win32")
self.assertEqual(d.parsed_version, parse_version("1.3-1"))
def testDistroBasics(self):
d = Distribution(
"/some/path",
project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
)
self.checkFooPkg(d)
d = Distribution("/some/path")
self.assertEqual(d.py_version, sys.version[:3])
self.assertEqual(d.platform, None)
def testDistroParse(self):
d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg")
self.checkFooPkg(d)
d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info")
self.checkFooPkg(d)
def testDistroMetadata(self):
d = Distribution(
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
metadata = Metadata(
('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
)
)
self.checkFooPkg(d)
def distRequires(self, txt):
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
def checkRequires(self, dist, txt, extras=()):
self.assertEqual(
list(dist.requires(extras)),
list(parse_requirements(txt))
)
def testDistroDependsSimple(self):
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
self.checkRequires(self.distRequires(v), v)
def testResolve(self):
ad = pkg_resources.Environment([])
ws = WorkingSet([])
# Resolving no requirements -> nothing to install
self.assertEqual(list(ws.resolve([],ad)), [])
# Request something not in the collection -> DistributionNotFound
self.assertRaises(
pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
)
Foo = Distribution.from_filename(
"/foo_dir/Foo-1.2.egg",
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
)
ad.add(Foo)
ad.add(Distribution.from_filename("Foo-0.9.egg"))
# Request thing(s) that are available -> list to activate
for i in range(3):
targets = list(ws.resolve(parse_requirements("Foo"), ad))
self.assertEqual(targets, [Foo])
list(map(ws.add,targets))
self.assertRaises(VersionConflict, ws.resolve,
parse_requirements("Foo==0.9"), ad)
ws = WorkingSet([]) # reset
# Request an extra that causes an unresolved dependency for "Baz"
self.assertRaises(
pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
)
Baz = Distribution.from_filename(
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
)
ad.add(Baz)
# Activation list now includes resolved dependency
self.assertEqual(
list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
)
# Requests for conflicting versions produce VersionConflict
self.assertRaises(VersionConflict,
ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
def testDistroDependsOptions(self):
d = self.distRequires("""
Twisted>=1.5
[docgen]
ZConfig>=2.0
docutils>=0.3
[fastcgi]
fcgiapp>=0.1""")
self.checkRequires(d,"Twisted>=1.5")
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
["docgen","fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
["fastcgi", "docgen"]
)
self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"])
class EntryPointTests(TestCase):
def assertfields(self, ep):
self.assertEqual(ep.name,"foo")
self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
self.assertEqual(ep.attrs, ("EntryPointTests",))
self.assertEqual(ep.extras, ("x",))
self.assertTrue(ep.load() is EntryPointTests)
self.assertEqual(
str(ep),
"foo = setuptools.tests.test_resources:EntryPointTests [x]"
)
def setUp(self):
self.dist = Distribution.from_filename(
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
def testBasics(self):
ep = EntryPoint(
"foo", "setuptools.tests.test_resources", ["EntryPointTests"],
["x"], self.dist
)
self.assertfields(ep)
def testParse(self):
s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
ep = EntryPoint.parse(s, self.dist)
self.assertfields(ep)
ep = EntryPoint.parse("bar baz= spammity[PING]")
self.assertEqual(ep.name,"bar baz")
self.assertEqual(ep.module_name,"spammity")
self.assertEqual(ep.attrs, ())
self.assertEqual(ep.extras, ("ping",))
ep = EntryPoint.parse(" fizzly = wocka:foo")
self.assertEqual(ep.name,"fizzly")
self.assertEqual(ep.module_name,"wocka")
self.assertEqual(ep.attrs, ("foo",))
self.assertEqual(ep.extras, ())
def testRejects(self):
for ep in [
"foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
]:
try: EntryPoint.parse(ep)
except ValueError: pass
else: raise AssertionError("Should've been bad", ep)
def checkSubMap(self, m):
self.assertEqual(len(m), len(self.submap_expect))
for key, ep in iteritems(self.submap_expect):
self.assertEqual(repr(m.get(key)), repr(ep))
submap_expect = dict(
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
feature3=EntryPoint('feature3', 'this.module', extras=['something'])
)
submap_str = """
# define features for blah blah
feature1 = somemodule:somefunction
feature2 = another.module:SomeClass [extra1,extra2]
feature3 = this.module [something]
"""
def testParseList(self):
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
self.assertRaises(ValueError, EntryPoint.parse_group, "x",
["foo=baz", "foo=bar"])
def testParseMap(self):
m = EntryPoint.parse_map({'xyz':self.submap_str})
self.checkSubMap(m['xyz'])
self.assertEqual(list(m.keys()),['xyz'])
m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
self.checkSubMap(m['xyz'])
self.assertEqual(list(m.keys()),['xyz'])
self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
class RequirementsTests(TestCase):
def testBasics(self):
r = Requirement.parse("Twisted>=1.2")
self.assertEqual(str(r),"Twisted>=1.2")
self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
def testOrdering(self):
r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
self.assertEqual(r1,r2)
self.assertEqual(str(r1),str(r2))
self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
def testBasicContains(self):
r = Requirement("Twisted", [('>=','1.2')], ())
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
twist11 = Distribution.from_filename("Twisted-1.1.egg")
twist12 = Distribution.from_filename("Twisted-1.2.egg")
self.assertTrue(parse_version('1.2') in r)
self.assertTrue(parse_version('1.1') not in r)
self.assertTrue('1.2' in r)
self.assertTrue('1.1' not in r)
self.assertTrue(foo_dist not in r)
self.assertTrue(twist11 not in r)
self.assertTrue(twist12 in r)
def testAdvancedContains(self):
r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
self.assertTrue(v in r, (v,r))
for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
self.assertTrue(v not in r, (v,r))
def testOptionsAndHashing(self):
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
self.assertEqual(r1,r2)
self.assertEqual(r1,r3)
self.assertEqual(r1.extras, ("foo","bar"))
self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
self.assertEqual(hash(r1), hash(r2))
self.assertEqual(
hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
frozenset(["foo","bar"])))
)
def testVersionEquality(self):
r1 = Requirement.parse("foo==0.3a2")
r2 = Requirement.parse("foo!=0.3a4")
d = Distribution.from_filename
self.assertTrue(d("foo-0.3a4.egg") not in r1)
self.assertTrue(d("foo-0.3a1.egg") not in r1)
self.assertTrue(d("foo-0.3a4.egg") not in r2)
self.assertTrue(d("foo-0.3a2.egg") in r1)
self.assertTrue(d("foo-0.3a2.egg") in r2)
self.assertTrue(d("foo-0.3a3.egg") in r2)
self.assertTrue(d("foo-0.3a5.egg") in r2)
def testSetuptoolsProjectName(self):
"""
The setuptools project should implement the setuptools package.
"""
self.assertEqual(
Requirement.parse('setuptools').project_name, 'setuptools')
# setuptools 0.7 and higher means setuptools.
self.assertEqual(
Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
self.assertEqual(
Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
self.assertEqual(
Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
class ParseTests(TestCase):
def testEmptyParse(self):
self.assertEqual(list(parse_requirements('')), [])
def testYielding(self):
for inp,out in [
([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
(['x\n\n','y'], ['x','y']),
]:
self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
def testSplitting(self):
sample = """
x
[Y]
z
a
[b ]
# foo
c
[ d]
[q]
v
"""
self.assertEqual(list(pkg_resources.split_sections(sample)),
[(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
)
self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
def testSafeName(self):
self.assertEqual(safe_name("adns-python"), "adns-python")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
self.assertNotEqual(safe_name("peak.web"), "peak-web")
def testSafeVersion(self):
self.assertEqual(safe_version("1.2-1"), "1.2-1")
self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
self.assertEqual(safe_version("peak.web"), "peak.web")
def testSimpleRequirements(self):
self.assertEqual(
list(parse_requirements('Twis-Ted>=1.2-1')),
[Requirement('Twis-Ted',[('>=','1.2-1')], ())]
)
self.assertEqual(
list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
)
self.assertEqual(
Requirement.parse("FooBar==1.99a3"),
Requirement("FooBar", [('==','1.99a3')], ())
)
self.assertRaises(ValueError,Requirement.parse,">=2.3")
self.assertRaises(ValueError,Requirement.parse,"x\\")
self.assertRaises(ValueError,Requirement.parse,"x==2 q")
self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
self.assertRaises(ValueError,Requirement.parse,"#")
def testVersionEquality(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.assertEqual(p1,p2, (s1,s2,p1,p2))
c('1.2-rc1', '1.2rc1')
c('0.4', '0.4.0')
c('0.4.0.0', '0.4.0')
c('0.4.0-0', '0.4-0')
c('0pl1', '0.0pl1')
c('0pre1', '0.0c1')
c('0.0.0preview1', '0c1')
c('0.0c1', '0-rc1')
c('1.2a1', '1.2.a.1')
c('1.2...a', '1.2a')
def testVersionOrdering(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
self.assertTrue(p1<p2, (s1,s2,p1,p2))
c('2.1','2.1.1')
c('2a1','2b0')
c('2a1','2.1')
c('2.3a1', '2.3')
c('2.1-1', '2.1-2')
c('2.1-1', '2.1.1')
c('2.1', '2.1pl4')
c('2.1a0-20040501', '2.1')
c('1.1', '02.1')
c('A56','B27')
c('3.2', '3.2.pl0')
c('3.2-1', '3.2pl1')
c('3.2pl1', '3.2pl1-1')
c('0.4', '4.0')
c('0.0.4', '0.4.0')
c('0pl1', '0.4pl1')
c('2.1.0-rc1','2.1.0')
c('2.1dev','2.1a0')
torture ="""
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
0.77.2-1 0.77.1-1 0.77.0-1
""".split()
for p,v1 in enumerate(torture):
for v2 in torture[p+1:]:
c(v2,v1)
class ScriptHeaderTests(TestCase):
non_ascii_exe = '/Users/José/bin/python'
exe_with_spaces = r'C:\Program Files\Python33\python.exe'
def test_get_script_header(self):
if not sys.platform.startswith('java') or not is_sh(sys.executable):
# This test is for non-Jython platforms
expected = '#!%s\n' % nt_quote_arg(os.path.normpath(sys.executable))
self.assertEqual(get_script_header('#!/usr/local/bin/python'),
expected)
expected = '#!%s -x\n' % nt_quote_arg(os.path.normpath(sys.executable))
self.assertEqual(get_script_header('#!/usr/bin/python -x'),
expected)
self.assertEqual(get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe),
'#!%s -x\n' % self.non_ascii_exe)
candidate = get_script_header('#!/usr/bin/python',
executable=self.exe_with_spaces)
self.assertEqual(candidate, '#!"%s"\n' % self.exe_with_spaces)
def test_get_script_header_jython_workaround(self):
# This test doesn't work with Python 3 in some locales
if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
in (None, "C", "POSIX")):
return
class java:
class lang:
class System:
@staticmethod
def getProperty(property):
return ""
sys.modules["java"] = java
platform = sys.platform
sys.platform = 'java1.5.0_13'
stdout, stderr = sys.stdout, sys.stderr
try:
# A mock sys.executable that uses a shebang line (this file)
exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
self.assertEqual(
get_script_header('#!/usr/local/bin/python', executable=exe),
'#!/usr/bin/env %s\n' % exe)
# Ensure we generate what is basically a broken shebang line
            # when there are options, with a warning emitted
sys.stdout = sys.stderr = StringIO()
self.assertEqual(get_script_header('#!/usr/bin/python -x',
executable=exe),
'#!%s -x\n' % exe)
self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
sys.stdout = sys.stderr = StringIO()
self.assertEqual(get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe),
'#!%s -x\n' % self.non_ascii_exe)
self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
finally:
del sys.modules["java"]
sys.platform = platform
sys.stdout, sys.stderr = stdout, stderr
class NamespaceTests(TestCase):
def setUp(self):
self._ns_pkgs = pkg_resources._namespace_packages.copy()
self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-")
os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
self._prev_sys_path = sys.path[:]
sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
def tearDown(self):
shutil.rmtree(self._tmpdir)
pkg_resources._namespace_packages = self._ns_pkgs.copy()
sys.path = self._prev_sys_path[:]
def _assertIn(self, member, container):
""" assertIn and assertTrue does not exist in Python2.3"""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
            self.fail(standardMsg)
def test_two_levels_deep(self):
"""
Test nested namespace packages
Create namespace packages in the following tree :
site-packages-1/pkg1/pkg2
site-packages-2/pkg1/pkg2
Check both are in the _namespace_packages dict and that their __path__
is correct
"""
sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
for site in ["site-pkgs", "site-pkgs2"]:
pkg1_init = open(os.path.join(self._tmpdir, site,
"pkg1", "__init__.py"), "w")
pkg1_init.write(ns_str)
pkg1_init.close()
pkg2_init = open(os.path.join(self._tmpdir, site,
"pkg1", "pkg2", "__init__.py"), "w")
pkg2_init.write(ns_str)
pkg2_init.close()
import pkg1
self._assertIn("pkg1", pkg_resources._namespace_packages.keys())
try:
import pkg1.pkg2
except ImportError:
self.fail("Setuptools tried to import the parent namespace package")
# check the _namespace_packages dict
self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys())
self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"])
# check the __path__ attribute contains both paths
self.assertEqual(pkg1.pkg2.__path__, [
os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")])

View File

@@ -0,0 +1,79 @@
"""develop tests
"""
import sys
import os
import shutil
import unittest
import tempfile
import types
import pkg_resources
import setuptools.sandbox
from setuptools.sandbox import DirectorySandbox, SandboxViolation
def has_win32com():
"""
Run this to determine if the local machine has win32com, and if it
does, include additional tests.
"""
if not sys.platform.startswith('win32'):
return False
try:
mod = __import__('win32com')
except ImportError:
return False
return True
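# has_win32com() is evaluated in the class body below, so test_win32com is
# only defined on Windows machines that actually have the win32com extensions.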
class TestSandbox(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.dir)
def test_devnull(self):
if sys.version < '2.4':
return
sandbox = DirectorySandbox(self.dir)
sandbox.run(self._file_writer(os.devnull))
def _file_writer(path):
def do_write():
f = open(path, 'w')
f.write('xxx')
f.close()
return do_write
_file_writer = staticmethod(_file_writer)
if has_win32com():
def test_win32com(self):
"""
win32com should not be prevented from caching COM interfaces
in gen_py.
"""
import win32com
gen_py = win32com.__gen_path__
target = os.path.join(gen_py, 'test_write')
sandbox = DirectorySandbox(self.dir)
try:
try:
sandbox.run(self._file_writer(target))
except SandboxViolation:
self.fail("Could not create gen_py file due to SandboxViolation")
finally:
if os.path.exists(target): os.remove(target)
def test_setup_py_with_BOM(self):
"""
It should be possible to execute a setup.py with a Byte Order Mark
"""
target = pkg_resources.resource_filename(__name__,
'script-with-bom.py')
namespace = types.ModuleType('namespace')
setuptools.sandbox.execfile(target, vars(namespace))
assert namespace.result == 'passed'
if __name__ == '__main__':
unittest.main()

View File

@@ -0,0 +1,535 @@
# -*- coding: utf-8 -*-
"""sdist tests"""
import locale
import os
import shutil
import sys
import tempfile
import unittest
import unicodedata
import re
from setuptools.tests import environment, test_svn
from setuptools.tests.py26compat import skipIf
from setuptools.compat import StringIO, unicode
from setuptools.command.sdist import sdist, walk_revctrl
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution
from setuptools import svn_utils
SETUP_ATTRS = {
'name': 'sdist_test',
'version': '0.0',
'packages': ['sdist_test'],
'package_data': {'sdist_test': ['*.txt']}
}
SETUP_PY = """\
from setuptools import setup
setup(**%r)
""" % SETUP_ATTRS
if sys.version_info >= (3,):
LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
else:
LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
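# The Latin-1 bytes (0xf6 for 'ö') are not valid UTF-8, which is what the
# "skips non-utf8 filenames" tests below rely on.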
# Cannot use context manager because of Python 2.4
def quiet():
global old_stdout, old_stderr
old_stdout, old_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
def unquiet():
sys.stdout, sys.stderr = old_stdout, old_stderr
# Fake byte literals for Python <= 2.5
def b(s, encoding='utf-8'):
if sys.version_info >= (3,):
return s.encode(encoding)
return s
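# e.g. b('smörbröd.py') returns the UTF-8 encoded bytes under Python 3 and the
# raw byte-string literal unchanged under Python 2.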
# Convert to POSIX path
def posix(path):
if sys.version_info >= (3,) and not isinstance(path, str):
return path.replace(os.sep.encode('ascii'), b('/'))
else:
return path.replace(os.sep, '/')
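# e.g. on Windows posix(b'sdist_test\\a.txt') == b'sdist_test/a.txt'; str paths
# are handled the same way with a plain str.replace.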
# HFS Plus uses decomposed UTF-8
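# (NFD: e.g. u'ö' becomes u'o' followed by U+0308 COMBINING DIAERESIS), so a
# filename read back from the Mac filesystem may not compare equal to its
# composed form; decompose() applies the same NFD normalization to the
# expected name before comparing.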
def decompose(path):
if isinstance(path, unicode):
return unicodedata.normalize('NFD', path)
try:
path = path.decode('utf-8')
path = unicodedata.normalize('NFD', path)
path = path.encode('utf-8')
except UnicodeError:
pass # Not UTF-8
return path
class TestSdistTest(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
f.write(SETUP_PY)
f.close()
# Set up the rest of the test package
test_pkg = os.path.join(self.temp_dir, 'sdist_test')
os.mkdir(test_pkg)
# *.rst was not included in package_data, so c.rst should not be
# automatically added to the manifest when not under version control
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
# Just touch the files; their contents are irrelevant
open(os.path.join(test_pkg, fname), 'w').close()
self.old_cwd = os.getcwd()
os.chdir(self.temp_dir)
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.temp_dir)
def test_package_data_in_sdist(self):
"""Regression test for pull request #4: ensures that files listed in
package_data are included in the manifest even if they're not added to
version control.
"""
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# squelch output
quiet()
try:
cmd.run()
finally:
unquiet()
manifest = cmd.filelist.files
self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
def test_manifest_is_written_with_utf8_encoding(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
os.mkdir('sdist_test.egg-info')
# UTF-8 filename
filename = os.path.join('sdist_test', 'smörbröd.py')
# Add UTF-8 filename and write manifest
quiet()
try:
mm.run()
mm.filelist.files.append(filename)
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
u_contents = contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
# The manifest should contain the UTF-8 filename
if sys.version_info >= (3,):
self.assertTrue(posix(filename) in u_contents)
else:
self.assertTrue(posix(filename) in contents)
# Python 3 only
if sys.version_info >= (3,):
def test_write_manifest_allows_utf8_filenames(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
os.mkdir('sdist_test.egg-info')
# UTF-8 filename
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
# Add filename and write manifest
quiet()
try:
mm.run()
u_filename = filename.decode('utf-8')
mm.filelist.files.append(u_filename)
# Re-write manifest
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
# The manifest should contain the UTF-8 filename
self.assertTrue(posix(filename) in contents)
# The filelist should have been updated as well
self.assertTrue(u_filename in mm.filelist.files)
def test_write_manifest_skips_non_utf8_filenames(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
os.mkdir('sdist_test.egg-info')
# Latin-1 filename
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
# Add filename with surrogates and write manifest
quiet()
try:
mm.run()
u_filename = filename.decode('utf-8', 'surrogateescape')
mm.filelist.files.append(u_filename)
# Re-write manifest
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
# The Latin-1 filename should have been skipped
self.assertFalse(posix(filename) in contents)
# The filelist should have been updated as well
self.assertFalse(u_filename in mm.filelist.files)
def test_manifest_is_read_with_utf8_encoding(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# Create manifest
quiet()
try:
cmd.run()
finally:
unquiet()
# Add UTF-8 filename to manifest
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
manifest = open(cmd.manifest, 'ab')
manifest.write(b('\n')+filename)
manifest.close()
# The file must exist to be included in the filelist
open(filename, 'w').close()
# Re-read manifest
cmd.filelist.files = []
quiet()
try:
cmd.read_manifest()
finally:
unquiet()
# The filelist should contain the UTF-8 filename
if sys.version_info >= (3,):
filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files)
# Python 3 only
if sys.version_info >= (3,):
def test_read_manifest_skips_non_utf8_filenames(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# Create manifest
quiet()
try:
cmd.run()
finally:
unquiet()
# Add Latin-1 filename to manifest
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
manifest = open(cmd.manifest, 'ab')
manifest.write(b('\n')+filename)
manifest.close()
# The file must exist to be included in the filelist
open(filename, 'w').close()
# Re-read manifest
cmd.filelist.files = []
quiet()
try:
try:
cmd.read_manifest()
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
finally:
unquiet()
# The Latin-1 filename should have been skipped
filename = filename.decode('latin-1')
self.assertFalse(filename in cmd.filelist.files)
@skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8',
            'Unittest fails if locale is not utf-8 but the manifest is recorded correctly')
def test_sdist_with_utf8_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# UTF-8 filename
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
open(filename, 'w').close()
quiet()
try:
cmd.run()
finally:
unquiet()
if sys.platform == 'darwin':
filename = decompose(filename)
if sys.version_info >= (3,):
fs_enc = sys.getfilesystemencoding()
if sys.platform == 'win32':
if fs_enc == 'cp1252':
# Python 3 mangles the UTF-8 filename
filename = filename.decode('cp1252')
self.assertTrue(filename in cmd.filelist.files)
else:
filename = filename.decode('mbcs')
self.assertTrue(filename in cmd.filelist.files)
else:
filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files)
else:
self.assertTrue(filename in cmd.filelist.files)
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# Latin-1 filename
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
open(filename, 'w').close()
self.assertTrue(os.path.isfile(filename))
quiet()
try:
cmd.run()
finally:
unquiet()
if sys.version_info >= (3,):
            # Not all Windows systems have a default FS encoding of cp1252.
if sys.platform == 'win32':
                # Latin-1 is similar to Windows-1252; however, on an mbcs
                # filesystem the name is not stored in Latin-1 encoding.
fs_enc = sys.getfilesystemencoding()
if fs_enc == 'mbcs':
filename = filename.decode('mbcs')
else:
filename = filename.decode('latin-1')
self.assertTrue(filename in cmd.filelist.files)
else:
# The Latin-1 filename should have been skipped
filename = filename.decode('latin-1')
self.assertFalse(filename in cmd.filelist.files)
else:
# No conversion takes place under Python 2 and the file
# is included. We shall keep it that way for BBB.
self.assertTrue(filename in cmd.filelist.files)
class TestDummyOutput(environment.ZippedEnvironment):
def setUp(self):
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', "dummy.zip")
self.dataname = "dummy"
super(TestDummyOutput, self).setUp()
def _run(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
data_stream=0)
if code:
info = "DIR: " + os.path.abspath('.')
info += "\n SDIST RETURNED: %i\n\n" % code
info += data
raise AssertionError(info)
datalines = data.splitlines()
possible = (
"running sdist",
"running egg_info",
"creating dummy\.egg-info",
"writing dummy\.egg-info",
"writing top-level names to dummy\.egg-info",
"writing dependency_links to dummy\.egg-info",
"writing manifest file 'dummy\.egg-info",
"reading manifest file 'dummy\.egg-info",
"reading manifest template 'MANIFEST\.in'",
"writing manifest file 'dummy\.egg-info",
"creating dummy-0.1.1",
"making hard links in dummy-0\.1\.1",
"copying files to dummy-0\.1\.1",
"copying \S+ -> dummy-0\.1\.1",
"copying dummy",
"copying dummy\.egg-info",
"hard linking \S+ -> dummy-0\.1\.1",
"hard linking dummy",
"hard linking dummy\.egg-info",
"Writing dummy-0\.1\.1",
"creating dist",
"creating 'dist",
"Creating tar archive",
"running check",
"adding 'dummy-0\.1\.1",
"tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1",
"gzip .+ dist/dummy-0\.1\.1\.tar",
"removing 'dummy-0\.1\.1' \\(and everything under it\\)",
)
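        # Every line of the captured sdist output must match one of the
        # patterns above; anything else fails the run as unexpected output.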
print(" DIR: " + os.path.abspath('.'))
for line in datalines:
found = False
for pattern in possible:
if re.match(pattern, line):
print(" READ: " + line)
found = True
break
if not found:
raise AssertionError("Unexpexected: %s\n-in-\n%s"
% (line, data))
return data
def test_sources(self):
self._run()
class TestSvn(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return
        self.base_version = tuple([int(x) for x in version.split('.')[:2]])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
            # no example data newer than 1.8; fall back to the 1.8 fixture
self.base_version = (1, 8)
self.dataname = "svn%i%i_example" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvn, self).setUp()
    @skipIf(not test_svn._svn_check, "No SVN to test, in the first place")
def test_walksvn(self):
if self.base_version >= (1, 6):
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
#TODO is this right
expected = set([
os.path.join('a file'),
os.path.join(folder2, 'Changes.txt'),
os.path.join(folder2, 'MD5SUMS'),
os.path.join(folder2, 'README.txt'),
os.path.join(folder3, 'Changes.txt'),
os.path.join(folder3, 'MD5SUMS'),
os.path.join(folder3, 'README.txt'),
os.path.join(folder3, 'TODO.txt'),
os.path.join(folder3, 'fin'),
os.path.join('third_party', 'README.txt'),
os.path.join('folder', folder2, 'Changes.txt'),
os.path.join('folder', folder2, 'MD5SUMS'),
os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'Changes.txt'),
os.path.join('folder', folder3, 'fin'),
os.path.join('folder', folder3, 'MD5SUMS'),
os.path.join('folder', folder3, 'oops'),
os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'ZuMachen.txt'),
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'),
os.path.join('folder', 'lalala.txt'),
os.path.join('folder', 'quest.txt'),
# The example will have a deleted file
# (or should) but shouldn't return it
])
self.assertEqual(set(x for x in walk_revctrl()), expected)
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)

View File

@@ -0,0 +1,245 @@
# -*- coding: utf-8 -*-
"""svn tests"""
import io
import os
import subprocess
import sys
import unittest
from setuptools.tests import environment
from setuptools.compat import unicode, unichr
from setuptools import svn_utils
from setuptools.tests.py26compat import skipIf
def _do_svn_check():
try:
subprocess.check_call(["svn", "--version"],
shell=(sys.platform == 'win32'))
return True
except (OSError, subprocess.CalledProcessError):
return False
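# Probed once at import time; tests decorated with skipIf(not _svn_check, ...)
# are skipped when no working svn executable is found on the PATH.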
_svn_check = _do_svn_check()
class TestSvnVersion(unittest.TestCase):
def test_no_svn_found(self):
path_variable = None
for env in os.environ:
if env.lower() == 'path':
path_variable = env
if path_variable is None:
try:
self.skipTest('Cannot figure out how to modify path')
except AttributeError: # PY26 doesn't have this
return
old_path = os.environ[path_variable]
os.environ[path_variable] = ''
try:
version = svn_utils.SvnInfo.get_svn_version()
self.assertEqual(version, '')
finally:
os.environ[path_variable] = old_path
    @skipIf(not _svn_check, "No SVN to test, in the first place")
def test_svn_should_exist(self):
version = svn_utils.SvnInfo.get_svn_version()
self.assertNotEqual(version, '')
def _read_utf8_file(path):
fileobj = None
try:
fileobj = io.open(path, 'r', encoding='utf-8')
data = fileobj.read()
return data
finally:
if fileobj:
fileobj.close()
class ParserInfoXML(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_info.xml')
        # Remember these are pre-generated to test XML parsing,
        # so these paths might not be valid on your system
example_base = "%s_example" % svn_name
data = _read_utf8_file(path)
expected = set([
("\\".join((example_base, 'a file')), 'file'),
("\\".join((example_base, 'folder')), 'dir'),
("\\".join((example_base, 'folder', 'lalala.txt')), 'file'),
("\\".join((example_base, 'folder', 'quest.txt')), 'file'),
])
self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)),
expected)
def test_svn13(self):
self.parse_tester('svn13', False)
def test_svn14(self):
self.parse_tester('svn14', False)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class ParserExternalXML(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_ext_list.xml')
example_base = svn_name + '_example'
data = _read_utf8_file(path)
if ext_spaces:
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
expected = set([
os.sep.join((example_base, folder2)),
os.sep.join((example_base, folder3)),
# folder is third_party大介
os.sep.join((example_base,
unicode('third_party') +
unichr(0x5927) + unichr(0x4ecb))),
os.sep.join((example_base, 'folder', folder2)),
os.sep.join((example_base, 'folder', folder3)),
os.sep.join((example_base, 'folder',
unicode('third_party') +
unichr(0x5927) + unichr(0x4ecb))),
])
expected = set(os.path.normpath(x) for x in expected)
dir_base = os.sep.join(('C:', 'development', 'svn_example'))
self.assertEqual(set(x for x
in svn_utils.parse_externals_xml(data, dir_base)), expected)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class ParseExternal(unittest.TestCase):
def parse_tester(self, svn_name, ext_spaces):
path = os.path.join('setuptools', 'tests',
'svn_data', svn_name + '_ext_list.txt')
data = _read_utf8_file(path)
if ext_spaces:
expected = set(['third party2', 'third party3',
'third party3b', 'third_party'])
else:
expected = set(['third_party2', 'third_party3', 'third_party'])
self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)),
expected)
def test_svn13(self):
self.parse_tester('svn13', False)
def test_svn14(self):
self.parse_tester('svn14', False)
def test_svn15(self):
self.parse_tester('svn15', False)
def test_svn16(self):
self.parse_tester('svn16', True)
def test_svn17(self):
self.parse_tester('svn17', True)
def test_svn18(self):
self.parse_tester('svn18', True)
class TestSvn(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # empty or null
self.dataname = None
self.datafile = None
return
self.base_version = tuple([int(x) for x in version.split('.')[:2]])
if self.base_version < (1,3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1,9):
            # no example data newer than 1.8; fall back to the 1.8 fixture
self.base_version = (1,8)
self.dataname = "svn%i%i_example" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvn, self).setUp()
    @skipIf(not _svn_check, "No SVN to test, in the first place")
def test_revision(self):
rev = svn_utils.SvnInfo.load('.').get_revision()
self.assertEqual(rev, 6)
    @skipIf(not _svn_check, "No SVN to test, in the first place")
def test_entries(self):
expected = set([
(os.path.join('a file'), 'file'),
(os.path.join('folder'), 'dir'),
(os.path.join('folder', 'lalala.txt'), 'file'),
(os.path.join('folder', 'quest.txt'), 'file'),
#The example will have a deleted file (or should)
#but shouldn't return it
])
info = svn_utils.SvnInfo.load('.')
self.assertEqual(set(x for x in info.entries), expected)
    @skipIf(not _svn_check, "No SVN to test, in the first place")
def test_externals(self):
if self.base_version >= (1,6):
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
expected = set([
os.path.join(folder2),
os.path.join(folder3),
os.path.join('third_party'),
os.path.join('folder', folder2),
os.path.join('folder', folder3),
os.path.join('folder', 'third_party'),
])
info = svn_utils.SvnInfo.load('.')
self.assertEqual(set([x for x in info.externals]), expected)
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)

View File

@@ -0,0 +1,126 @@
# -*- coding: UTF-8 -*-
"""develop tests
"""
import os
import shutil
import site
import sys
import tempfile
import unittest
from distutils.errors import DistutilsError
from setuptools.compat import StringIO
from setuptools.command.test import test
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
"""
NS_INIT = """# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
"""
# Make sure this is Latin-1 binary, before writing:
if sys.version_info < (3,):
NS_INIT = NS_INIT.decode('UTF-8')
NS_INIT = NS_INIT.encode('Latin-1')
TEST_PY = """import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
test_suite = unittest.makeSuite(TestTest)
"""
class TestTestTest(unittest.TestCase):
def setUp(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
# Directory structure
self.dir = tempfile.mkdtemp()
os.mkdir(os.path.join(self.dir, 'name'))
os.mkdir(os.path.join(self.dir, 'name', 'space'))
os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
# setup.py
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'wt')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
# name/__init__.py
init = os.path.join(self.dir, 'name', '__init__.py')
f = open(init, 'wb')
f.write(NS_INIT)
f.close()
# name/space/__init__.py
init = os.path.join(self.dir, 'name', 'space', '__init__.py')
f = open(init, 'wt')
f.write('#empty\n')
f.close()
# name/space/tests/__init__.py
init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
f = open(init, 'wt')
f.write(TEST_PY)
f.close()
os.chdir(self.dir)
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_test(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
dist = Distribution(dict(
name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
use_2to3=True,
))
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.user = 1
cmd.ensure_finalized()
cmd.install_dir = site.USER_SITE
cmd.user = 1
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
cmd.run()
except SystemExit: # The test runner calls sys.exit, stop that making an error.
pass
finally:
sys.stdout = old_stdout

View File

@@ -0,0 +1,72 @@
"""build_ext tests
"""
import sys, os, shutil, tempfile, unittest, site, zipfile
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestUploadDocsTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.upload_dir = os.path.join(self.dir, 'build')
os.mkdir(self.upload_dir)
# A test document.
f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
f.write("Hello world.")
f.close()
# An empty folder.
os.mkdir(os.path.join(self.upload_dir, 'empty'))
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases.
# This explicitly includes a folder containing an empty folder.
dist = Distribution()
cmd = upload_docs(dist)
cmd.upload_dir = self.upload_dir
cmd.target_dir = self.upload_dir
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
            zip_file = zipfile.ZipFile(tmp_file)  # reopen the archive to inspect its contents
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)

View File

@@ -0,0 +1 @@
__version__ = '3.3'