update to tornado 4.0 and requests 2.3.0
parent 060f459965
commit f187000dc9
239 changed files with 19071 additions and 20369 deletions
@@ -6,42 +6,25 @@ __all__ = [
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]

import zipfile, tarfile, os, shutil, posixpath
from pkg_resources import ensure_directory
import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from pkg_resources import ensure_directory, ContextualZipFile
from distutils.errors import DistutilsError

class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""

def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
"""The default progress/filter callback; returns True for all files"""
return dst

def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None
):
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

`progress_filter` is a function taking two arguments: a source path

@@ -75,11 +58,6 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter,
)

def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives

@@ -94,7 +72,6 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter):
for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
for f in files:
name = src+f
target = os.path.join(dst,f)
target = progress_filter(src+f, target)
if not target:

@@ -105,22 +82,6 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter):
shutil.copystat(f, target)

def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`

@@ -132,8 +93,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))

z = zipfile.ZipFile(filename)
try:
with ContextualZipFile(filename) as z:
for info in z.infolist():
name = info.filename

@@ -161,8 +121,6 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
finally:
z.close()

def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):

@@ -178,7 +136,7 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
try:
with contextlib.closing(tarobj):
tarobj.chown = lambda *args: None # don't do any chowning!
for member in tarobj:
name = member.name

@@ -204,7 +162,5 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
except tarfile.ExtractError:
pass # chown/chmod/mkfifo/mknode/makedev failed
return True
finally:
tarobj.close()

extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
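Note: the archive_util changes above drop try/finally cleanup in favor of context managers (ContextualZipFile and contextlib.closing). A minimal standalone sketch of the tarfile side of that pattern, using only stdlib calls and hypothetical example paths:

import contextlib
import tarfile

def extract_all(filename, extract_dir):
    # contextlib.closing() guarantees tarobj.close() even if extraction
    # fails, which is what replaces the old try/finally block.
    tarobj = tarfile.open(filename)
    with contextlib.closing(tarobj):
        tarobj.extractall(extract_dir)

extract_all("archive.tar.gz", "dest/")  # example paths, not from the diff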

@@ -5,10 +5,11 @@ __all__ = [
'register', 'bdist_wininst', 'upload_docs',
]

from setuptools.command import install_scripts
from distutils.command.bdist import bdist
import sys

from distutils.command.bdist import bdist
from setuptools.command import install_scripts

if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")

@@ -2,10 +2,12 @@ from distutils.errors import DistutilsOptionError

from setuptools.command.setopt import edit_config, option_base, config_file

def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if c in arg:
return repr(arg)
if arg.split() != [arg]:
return repr(arg)
return arg

@@ -18,7 +20,7 @@ class alias(option_base):
command_consumes_arguments = True

user_options = [
('remove', 'r', 'remove (unset) the alias'),
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options

boolean_options = option_base.boolean_options + ['remove']

@@ -46,7 +48,7 @@ class alias(option_base):
print("setup.py alias", format_alias(alias, aliases))
return

elif len(self.args)==1:
elif len(self.args) == 1:
alias, = self.args
if self.remove:
command = None

@@ -58,9 +60,9 @@ class alias(option_base):
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
command = ' '.join(map(shquote, self.args[1:]))

edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)

def format_alias(name, aliases):

@@ -73,4 +75,4 @@ def format_alias(name, aliases):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
return source + name + ' ' + command

@@ -3,29 +3,33 @@
Build .egg distributions"""

# This module should be kept compatible with Python 2.3
from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import marshal
import textwrap

from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.compat import basestring
from setuptools.extension import Library
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath

try:
# Python 2.7 or >=3.2
from sysconfig import get_path, get_python_version

def _get_purelib():
return get_path("purelib")
except ImportError:
from distutils.sysconfig import get_python_lib, get_python_version

def _get_purelib():
return get_python_lib(False)

from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.compat import basestring, next
from setuptools.extension import Library

def strip_module(filename):
if '.' in filename:

@@ -34,6 +38,7 @@ def strip_module(filename):
filename = filename[:-6]
return filename

def write_stub(resource, pyfile):
_stub_template = textwrap.dedent("""
def __bootstrap__():

@@ -49,23 +54,22 @@ def write_stub(resource, pyfile):

class bdist_egg(Command):

description = "create an \"egg\" distribution"

user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
"temporary directory for creating the distribution"),
('plat-name=', 'p', "platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
"skip rebuilding everything (for testing/debugging)"),
]

boolean_options = [

@@ -92,7 +96,7 @@ class bdist_egg(Command):
if self.plat_name is None:
self.plat_name = get_build_platform()

self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

if self.egg_output is None:

@@ -103,25 +107,25 @@ class bdist_egg(Command):
self.distribution.has_ext_modules() and self.plat_name
).egg_name()

self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir

site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
old, self.distribution.data_files = self.distribution.data_files,[]
old, self.distribution.data_files = self.distribution.data_files, []

for item in old:
if isinstance(item,tuple) and len(item)==2:
if isinstance(item, tuple) and len(item) == 2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
if normalized == site_packages or normalized.startswith(
site_packages + os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
item = realpath[len(site_packages) + 1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)

try:

@@ -133,11 +137,11 @@ class bdist_egg(Command):
def get_outputs(self):
return [self.egg_output]

def call_command(self,cmdname,**kw):
def call_command(self, cmdname, **kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault(dirname, self.bdist_dir)
kw.setdefault('skip_build', self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)

@@ -160,15 +164,16 @@ class bdist_egg(Command):
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
for (p, ext_name) in enumerate(ext_outputs):
filename, ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
ext_outputs[p] = ext_name.replace(os.sep, '/')

if to_compile:
cmd.byte_compile(to_compile)

@@ -177,12 +182,13 @@ class bdist_egg(Command):

# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
egg_info = os.path.join(archive_root, 'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
self.call_command('install_scripts', install_dir=script_dir,
no_ep=1)

self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")

@@ -200,10 +206,10 @@ class bdist_egg(Command):
os.unlink(native_libs)

write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
)

if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."

@@ -214,25 +220,25 @@ class bdist_egg(Command):

# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)

# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
getattr(self.distribution, 'dist_files', []).append(
('bdist_egg', get_python_version(), self.egg_output))

def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for base, dirs, files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
path = os.path.join(base, name)
log.debug("Deleting %s", path)
os.unlink(path)

def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
safe = getattr(self.distribution, 'zip_safe', None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")

@@ -240,7 +246,7 @@ class bdist_egg(Command):

def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
ep = epm.get('setuptools.installation', {}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.

@@ -268,7 +274,6 @@ class bdist_egg(Command):
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'

) % locals()

if not self.dry_run:

@@ -283,7 +288,7 @@ class bdist_egg(Command):
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info,'')
prefix = os.path.join(norm_egg_info, '')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])

@@ -296,23 +301,24 @@ class bdist_egg(Command):
all_outputs = []
ext_outputs = []

paths = {self.bdist_dir:''}
paths = {self.bdist_dir: ''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
all_outputs.append(paths[base] + filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
paths[os.path.join(base, filename)] = (paths[base] +
filename + '/')

if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
if isinstance(ext, Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
if os.path.exists(os.path.join(self.bdist_dir, filename)):
ext_outputs.append(filename)

return all_outputs, ext_outputs

@@ -324,19 +330,21 @@ NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = next(walker)
base, dirs, files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
yield base, dirs, files
for bdf in walker:
yield bdf

def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
for flag, fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
return flag
if not can_scan(): return False
if not can_scan():
return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:

@@ -347,36 +355,39 @@ def analyze_egg(egg_dir, stubs):
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe

def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
for flag, fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
f = open(fn,'wt')
elif safe is not None and bool(safe) == flag:
f = open(fn, 'wt')
f.write('\n')
f.close()

safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}

def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""

filename = os.path.join(base,name)
filename = os.path.join(base, name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
if sys.version_info < (3, 3):
skip = 8 # skip magic & date
skip = 8 # skip magic & date
else:
skip = 12 # skip magic & date & file size
f = open(filename,'rb')
f = open(filename, 'rb')
f.read(skip)
code = marshal.load(f)
f.close()

@@ -396,21 +407,24 @@ def scan_module(egg_dir, base, name, stubs):
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
if sys.version[:3] == "2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe

def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for name in code.co_names:
yield name
for const in code.co_consts:
if isinstance(const,basestring):
if isinstance(const, basestring):
yield const
elif isinstance(const,CodeType):
elif isinstance(const, CodeType):
for name in iter_symbols(const):
yield name

def can_scan():
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.

@@ -426,8 +440,9 @@ INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]

def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'):
mode='w'):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed

@@ -435,6 +450,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile

mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

@@ -442,13 +458,14 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
p = path[len(base_dir) + 1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)

if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
# avoid 2.3 zipimport bug when 64 bits
compress = (sys.version >= "2.4")

compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:

@@ -1,5 +1,6 @@
import distutils.command.bdist_rpm as orig

class bdist_rpm(orig.bdist_rpm):
"""
Override the default bdist_rpm behavior to do the following:

@@ -19,7 +20,7 @@ class bdist_rpm(orig.bdist_rpm):

def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
rpmversion = version.replace('-', '_')
spec = orig.bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion

@@ -1,5 +1,6 @@
import distutils.command.bdist_wininst as orig

class bdist_wininst(orig.bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""

@@ -1,26 +1,29 @@
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
from distutils.errors import DistutilsError
from distutils import log
import os
import sys

from setuptools.extension import Library

try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext

import os
import sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
try:
# Python 2.7 or >=3.2
from sysconfig import _CONFIG_VARS
except ImportError:
from distutils.sysconfig import get_config_var

get_config_var("LDSHARED") # make sure _config_vars is initialized
del get_config_var
from distutils.sysconfig import _config_vars as _CONFIG_VARS
from distutils import log
from distutils.errors import DistutilsError

have_rtld = False
use_stubs = False

@@ -31,11 +34,13 @@ if sys.platform == "darwin":
elif os.name != 'nt':
try:
from dl import RTLD_NOW

have_rtld = True
use_stubs = True
except ImportError:
pass

def if_dl(s):
if have_rtld:
return s

@@ -59,8 +64,9 @@ class build_ext(_build_ext):
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
dest_filename = os.path.join(package_dir,
os.path.basename(filename))
src_filename = os.path.join(self.build_lib, filename)

# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are

@@ -72,7 +78,8 @@ class build_ext(_build_ext):
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)

if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
if _build_ext is not _du_build_ext and not hasattr(_build_ext,
'pyrex_sources'):
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing

@@ -81,15 +88,15 @@ class build_ext(_build_ext):
return _du_build_ext.swig_sources(self, sources, *otherargs)

def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
filename = _build_ext.get_ext_filename(self, fullname)
if fullname in self.ext_map:
ext = self.ext_map[fullname]
if isinstance(ext,Library):
if isinstance(ext, Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
return self.shlib_compiler.library_filename(fn, libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
d, fn = os.path.split(filename)
return os.path.join(d, 'dl-' + fn)
return filename

def initialize_options(self):

@@ -103,7 +110,7 @@ class build_ext(_build_ext):
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext, Library)]
if isinstance(ext, Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:

@@ -118,9 +125,10 @@ class build_ext(_build_ext):

ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
ext._needs_stub = ltd and use_stubs and not isinstance(ext,
Library)
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:

@@ -134,7 +142,8 @@ class build_ext(_build_ext):
tmp = _CONFIG_VARS.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_CONFIG_VARS['LDSHARED'] = (
"gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
_CONFIG_VARS['CCSHARED'] = " -dynamiclib"
_CONFIG_VARS['SO'] = ".dylib"
customize_compiler(compiler)

@@ -148,7 +157,7 @@ class build_ext(_build_ext):
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
for (name, value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:

@@ -166,16 +175,16 @@ class build_ext(_build_ext):
compiler.link_shared_object = link_shared_object.__get__(compiler)

def get_export_symbols(self, ext):
if isinstance(ext,Library):
if isinstance(ext, Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
return _build_ext.get_export_symbols(self, ext)

def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
if isinstance(ext, Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
_build_ext.build_extension(self, ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext

@@ -189,9 +198,10 @@ class build_ext(_build_ext):
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
if pkg + libname in libnames:
return True
return False

def get_outputs(self):

@@ -200,26 +210,29 @@ class build_ext(_build_ext):
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
outputs.append(base + '.py')
outputs.append(base + '.pyc')
if optimize:
outputs.append(base+'.pyo')
outputs.append(base + '.pyo')
return outputs

def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
log.info("writing stub loader for %s to %s", ext._full_name,
output_dir)
stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
'.py')
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
raise DistutilsError(stub_file + " already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f = open(stub_file, 'w')
f.write(
'\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp"+if_dl(", dl"),
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
" import sys, os, pkg_resources, imp" + if_dl(", dl"),
" __file__ = pkg_resources.resource_filename"
"(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",

@@ -233,12 +246,13 @@ class build_ext(_build_ext):
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
"" # terminal \n
])
)
f.close()
if compile:
from distutils.util import byte_compile

byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize

@@ -249,13 +263,14 @@ class build_ext(_build_ext):
os.unlink(stub_file)

if use_stubs or os.name=='nt':
if use_stubs or os.name == 'nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,

@@ -266,18 +281,19 @@ else:
# Build static libraries everywhere else
libtype = 'static'

def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
# or warn/abort here if set, or something...
# libraries=None, library_dirs=None, runtime_library_dirs=None,
# export_symbols=None, extra_preargs=None, extra_postargs=None,
# build_temp=None

assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
assert output_dir is None # distutils build_ext doesn't pass this
output_dir, filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
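Note: the build_ext changes above still reach for the private _CONFIG_VARS mapping, with a distutils fallback for older Pythons. A rough public-API equivalent, shown only for context (it is not what the diff itself does):

try:
    # Python 2.7 / >=3.2 expose the compiler configuration via sysconfig
    from sysconfig import get_config_vars
except ImportError:
    # older Pythons: fall back to distutils
    from distutils.sysconfig import get_config_vars

# a copy of the build configuration, e.g. the shared-library link command
config = dict(get_config_vars())
print(config.get("LDSHARED"))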

@@ -1,10 +1,10 @@
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import sys
import fnmatch
import textwrap
import distutils.command.build_py as orig
from distutils.util import convert_path
from glob import glob

try:
from setuptools.lib2to3_ex import Mixin2to3

@@ -13,6 +13,7 @@ except ImportError:
def run_2to3(self, files, doctests=True):
"do nothing"

class build_py(orig.build_py, Mixin2to3):
"""Enhanced 'build_py' command that includes data files with packages

@@ -22,11 +23,14 @@ class build_py(orig.build_py, Mixin2to3):
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""

def finalize_options(self):
orig.build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
self.exclude_package_data = (self.distribution.exclude_package_data or
{})
if 'data_files' in self.__dict__:
del self.__dict__['data_files']
self.__updated_files = []
self.__doctests_2to3 = []

@@ -51,13 +55,14 @@ class build_py(orig.build_py, Mixin2to3):
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

def __getattr__(self, attr):
if attr=='data_files': # lazily compute data files
if attr == 'data_files': # lazily compute data files
self.data_files = files = self._get_data_files()
return files
return orig.build_py.__getattr__(self,attr)
return orig.build_py.__getattr__(self, attr)

def build_module(self, module, module_file, package):
outfile, copied = orig.build_py.build_module(self, module, module_file, package)
outfile, copied = orig.build_py.build_module(self, module, module_file,
package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied

@@ -74,12 +79,12 @@ class build_py(orig.build_py, Mixin2to3):
build_dir = os.path.join(*([self.build_lib] + package.split('.')))

# Length of path to strip from found files
plen = len(src_dir)+1
plen = len(src_dir) + 1

# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
]
data.append((package, src_dir, build_dir, filenames))
return data

@@ -102,7 +107,8 @@ class build_py(orig.build_py, Mixin2to3):
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if copied and srcfile in self.distribution.convert_2to3_doctests:
if (copied and
srcfile in self.distribution.convert_2to3_doctests):
self.__doctests_2to3.append(outf)

def analyze_manifest(self):

@@ -117,21 +123,22 @@ class build_py(orig.build_py, Mixin2to3):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
d,f = os.path.split(assert_relative(path))
d, f = os.path.split(assert_relative(path))
prev = None
oldf = f
while d and d!=prev and d not in src_dirs:
while d and d != prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
if path.endswith('.py') and f==oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d],[]).append(path)
if path.endswith('.py') and f == oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d], []).append(path)

def get_data_files(self): pass # kludge 2.4 for lazy computation
def get_data_files(self):
pass # kludge 2.4 for lazy computation

if sys.version<"2.4": # Python 2.4 already has this code
if sys.version < "2.4": # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory

@@ -142,9 +149,9 @@ class build_py(orig.build_py, Mixin2to3):
"""
return orig.build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames
]
]

def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""

@@ -160,25 +167,26 @@ class build_py(orig.build_py, Mixin2to3):
return init_py

for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
if pkg == package or pkg.startswith(package + '.'):
break
else:
return init_py

f = open(init_py,'rbU')
f = open(init_py, 'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils.errors import DistutilsError

raise DistutilsError(
"Namespace package problem: %s is a namespace package, but its\n"
"__init__.py does not call declare_namespace()! Please fix it.\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n" % (package,)
"Namespace package problem: %s is a namespace package, but "
"its\n__init__.py does not call declare_namespace()! Please "
'fix it.\n(See the setuptools manual under '
'"Namespace Packages" for details.)\n"' % (package,)
)
f.close()
return init_py

def initialize_options(self):
self.packages_checked={}
self.packages_checked = {}
orig.build_py.initialize_options(self)

def get_package_dir(self, package):

@@ -202,7 +210,7 @@ class build_py(orig.build_py, Mixin2to3):
seen = {}
return [
f for f in files if f not in bad
and f not in seen and seen.setdefault(f,1) # ditch dupes
and f not in seen and seen.setdefault(f, 1) # ditch dupes
]

@@ -210,6 +218,7 @@ def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError

msg = textwrap.dedent("""
Error: setup script specifies an absolute path:

@@ -1,9 +1,14 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
import os
import glob

from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
from setuptools.compat import PY3
import setuptools

class develop(easy_install):
"""Set up package for development"""

@@ -32,59 +37,56 @@ class develop(easy_install):
self.egg_path = None
easy_install.initialize_options(self)
self.setup_path = None
self.always_copy_from = '.' # always copy eggs installed in curdir

self.always_copy_from = '.' # always copy eggs installed in curdir

def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
raise DistutilsError(
"Please rename %r to %r before using 'develop'"
% (ei.egg_info, ei.broken_egg_info)
)
template = "Please rename %r to %r before using 'develop'"
args = ei.egg_info, ei.broken_egg_info
raise DistutilsError(template % args)
self.args = [ei.egg_name]

easy_install.finalize_options(self)
self.expand_basedirs()
self.expand_dirs()
# pick up setup-dir .egg files only: no .egg-info
self.package_index.scan(glob.glob('*.egg'))

self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
self.egg_link = os.path.join(self.install_dir, ei.egg_name +
'.egg-link')
self.egg_base = ei.egg_base
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)

target = normalize_path(self.egg_base)
if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
egg_path = normalize_path(os.path.join(self.install_dir,
self.egg_path))
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
" directory to "+target
)
" directory to " + target
)

# Make a distribution for the package's source
self.dist = Distribution(
target,
PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name = ei.egg_name
project_name=ei.egg_name
)

p = self.egg_base.replace(os.sep,'/')
if p!= os.curdir:
p = '../' * (p.count('/')+1)
p = self.egg_base.replace(os.sep, '/')
if p != os.curdir:
p = '../' * (p.count('/') + 1)
self.setup_path = p
p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
if p != normalize_path(os.curdir):
if p != normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
" installation directory", p, normalize_path(os.curdir))

def install_for_development(self):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
if PY3 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:

# Ensure metadata is up-to-date

@@ -99,12 +101,13 @@ class develop(easy_install):

self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')

# Fixup egg-link and easy-install.pth
ei_cmd = self.get_finalized_command("egg_info")
self.egg_path = build_path
self.dist.location = build_path
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX
# XXX
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')

@@ -112,7 +115,7 @@ class develop(easy_install):
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')

self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)

@@ -121,21 +124,21 @@ class develop(easy_install):
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
f = open(self.egg_link,"w")
f = open(self.egg_link, "w")
f.write(self.egg_path + "\n" + self.setup_path)
f.close()
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist, not self.no_deps)

def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
if contents not in ([self.egg_path],
[self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:

@@ -149,7 +152,7 @@ class develop(easy_install):
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self,dist)
return easy_install.install_egg_scripts(self, dist)

# create wrapper scripts in the script dir, pointing to dist.scripts

@@ -160,8 +163,7 @@ class develop(easy_install):
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
f = open(script_path,'rU')
f = open(script_path, 'rU')
script_text = f.read()
f.close()
self.install_script(dist, script_name, script_text, script_path)

File diff suppressed because it is too large
@ -2,21 +2,23 @@
|
|||
|
||||
Create a distribution's .egg-info directory and contents"""
|
||||
|
||||
from distutils.filelist import FileList as _FileList
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.errors
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from setuptools import Command
|
||||
import distutils.errors
|
||||
from distutils import log
|
||||
from setuptools.command.sdist import sdist
|
||||
from setuptools.compat import basestring
|
||||
from setuptools.compat import basestring, PY3, StringIO
|
||||
from setuptools import svn_utils
|
||||
from distutils.util import convert_path
|
||||
from distutils.filelist import FileList as _FileList
|
||||
from pkg_resources import (parse_requirements, safe_name, parse_version,
|
||||
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
||||
from setuptools.command.sdist import walk_revctrl
|
||||
from pkg_resources import (
|
||||
parse_requirements, safe_name, parse_version,
|
||||
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
||||
import setuptools.unicode_utils as unicode_utils
|
||||
|
||||
|
||||
class egg_info(Command):
|
||||
|
|
@ -26,11 +28,11 @@ class egg_info(Command):
|
|||
('egg-base=', 'e', "directory containing .egg-info directories"
|
||||
" (default: top of the source tree)"),
|
||||
('tag-svn-revision', 'r',
|
||||
"Add subversion revision ID to version number"),
|
||||
"Add subversion revision ID to version number"),
|
||||
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
|
||||
('tag-build=', 'b', "Specify explicit tag to add to version number"),
|
||||
('no-svn-revision', 'R',
|
||||
"Don't add subversion revision ID [default]"),
|
||||
"Don't add subversion revision ID [default]"),
|
||||
('no-date', 'D', "Don't include date stamp [default]"),
|
||||
]
|
||||
|
||||
|
|
@ -51,6 +53,7 @@ class egg_info(Command):
|
|||
|
||||
def save_version_info(self, filename):
|
||||
from setuptools.command.setopt import edit_config
|
||||
|
||||
values = dict(
|
||||
egg_info=dict(
|
||||
tag_svn_revision=0,
|
||||
|
|
@ -67,23 +70,25 @@ class egg_info(Command):
|
|||
|
||||
try:
|
||||
list(
|
||||
parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
|
||||
parse_requirements('%s==%s' % (self.egg_name,
|
||||
self.egg_version))
|
||||
)
|
||||
except ValueError:
|
||||
raise distutils.errors.DistutilsOptionError(
|
||||
"Invalid distribution name or version syntax: %s-%s" %
|
||||
(self.egg_name,self.egg_version)
|
||||
(self.egg_name, self.egg_version)
|
||||
)
|
||||
|
||||
if self.egg_base is None:
|
||||
dirs = self.distribution.package_dir
|
||||
self.egg_base = (dirs or {}).get('',os.curdir)
|
||||
self.egg_base = (dirs or {}).get('', os.curdir)
|
||||
|
||||
self.ensure_dirname('egg_base')
|
||||
self.egg_info = to_filename(self.egg_name)+'.egg-info'
|
||||
self.egg_info = to_filename(self.egg_name) + '.egg-info'
|
||||
if self.egg_base != os.curdir:
|
||||
self.egg_info = os.path.join(self.egg_base, self.egg_info)
|
||||
if '-' in self.egg_name: self.check_broken_egg_info()
|
||||
if '-' in self.egg_name:
|
||||
self.check_broken_egg_info()
|
||||
|
||||
# Set package version for the benefit of dumber commands
|
||||
# (e.g. sdist, bdist_wininst, etc.)
|
||||
|
|
@ -95,7 +100,7 @@ class egg_info(Command):
|
|||
# to the version info
|
||||
#
|
||||
pd = self.distribution._patched_dist
|
||||
if pd is not None and pd.key==self.egg_name.lower():
|
||||
if pd is not None and pd.key == self.egg_name.lower():
|
||||
pd._version = self.egg_version
|
||||
pd._parsed_version = parse_version(self.egg_version)
|
||||
self.distribution._patched_dist = None
|
||||
|
|
@ -127,7 +132,7 @@ class egg_info(Command):
|
|||
to the file.
|
||||
"""
|
||||
log.info("writing %s to %s", what, filename)
|
||||
if sys.version_info >= (3,):
|
||||
if PY3:
|
||||
data = data.encode("utf-8")
|
||||
if not self.dry_run:
|
||||
f = open(filename, 'wb')
|
||||
|
|
@ -153,7 +158,7 @@ class egg_info(Command):
|
|||
installer = self.distribution.fetch_build_egg
|
||||
for ep in iter_entry_points('egg_info.writers'):
|
||||
writer = ep.load(installer=installer)
|
||||
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
|
||||
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
|
||||
|
||||
# Get rid of native_libs.txt if it was put there by older bdist_egg
|
||||
nl = os.path.join(self.egg_info, "native_libs.txt")
|
||||
|
|
@ -165,12 +170,14 @@ class egg_info(Command):
|
|||
def tags(self):
|
||||
version = ''
|
||||
if self.tag_build:
|
||||
version+=self.tag_build
|
||||
if self.tag_svn_revision and (
|
||||
os.path.exists('.svn') or os.path.exists('PKG-INFO')
|
||||
): version += '-r%s' % self.get_svn_revision()
|
||||
version += self.tag_build
|
||||
if self.tag_svn_revision:
|
||||
rev = self.get_svn_revision()
|
||||
if rev: # is 0 if it's not an svn working copy
|
||||
version += '-r%s' % rev
|
||||
if self.tag_date:
|
||||
import time
|
||||
|
||||
version += time.strftime("-%Y%m%d")
|
||||
return version
|
||||
|
||||
|
|
@ -180,53 +187,77 @@ class egg_info(Command):
|
|||
|
||||
def find_sources(self):
|
||||
"""Generate SOURCES.txt manifest file"""
|
||||
manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
|
||||
manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
|
||||
mm = manifest_maker(self.distribution)
|
||||
mm.manifest = manifest_filename
|
||||
mm.run()
|
||||
self.filelist = mm.filelist
|
||||
|
||||
def check_broken_egg_info(self):
|
||||
bei = self.egg_name+'.egg-info'
|
||||
bei = self.egg_name + '.egg-info'
|
||||
if self.egg_base != os.curdir:
|
||||
bei = os.path.join(self.egg_base, bei)
|
||||
if os.path.exists(bei):
|
||||
log.warn(
|
||||
"-"*78+'\n'
|
||||
"-" * 78 + '\n'
|
||||
"Note: Your current .egg-info directory has a '-' in its name;"
|
||||
'\nthis will not work correctly with "setup.py develop".\n\n'
|
||||
'Please rename %s to %s to correct this problem.\n'+'-'*78,
|
||||
'Please rename %s to %s to correct this problem.\n' + '-' * 78,
|
||||
bei, self.egg_info
|
||||
)
|
||||
self.broken_egg_info = self.egg_info
|
||||
self.egg_info = bei # make it work for now
|
||||
self.egg_info = bei # make it work for now
|
||||
|
||||
|
||||
class FileList(_FileList):
|
||||
"""File list that accepts only existing, platform-independent paths"""
|
||||
|
||||
def append(self, item):
|
||||
if item.endswith('\r'): # Fix older sdists built on Windows
|
||||
if item.endswith('\r'): # Fix older sdists built on Windows
|
||||
item = item[:-1]
|
||||
path = convert_path(item)
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
try:
|
||||
if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
|
||||
self.files.append(path)
|
||||
except UnicodeEncodeError:
|
||||
# Accept UTF-8 filenames even if LANG=C
|
||||
if os.path.exists(path.encode('utf-8')):
|
||||
self.files.append(path)
|
||||
else:
|
||||
log.warn("'%s' not %s encodable -- skipping", path,
|
||||
sys.getfilesystemencoding())
|
||||
else:
|
||||
if os.path.exists(path):
|
||||
self.files.append(path)
|
||||
if self._safe_path(path):
|
||||
self.files.append(path)
|
||||
|
||||
def extend(self, paths):
|
||||
self.files.extend(filter(self._safe_path, paths))
|
||||
|
||||
def _repair(self):
|
||||
"""
|
||||
Replace self.files with only safe paths
|
||||
|
||||
Because some owners of FileList manipulate the underlying
|
||||
``files`` attribute directly, this method must be called to
|
||||
repair those paths.
|
||||
"""
|
||||
self.files = list(filter(self._safe_path, self.files))
|
||||
|
||||
def _safe_path(self, path):
|
||||
enc_warn = "'%s' not %s encodable -- skipping"
|
||||
|
||||
# To avoid accidental trans-codings errors, first to unicode
|
||||
u_path = unicode_utils.filesys_decode(path)
|
||||
if u_path is None:
|
||||
log.warn("'%s' in unexpected encoding -- skipping" % path)
|
||||
return False
|
||||
|
||||
# Must ensure utf-8 encodability
|
||||
utf8_path = unicode_utils.try_encode(u_path, "utf-8")
|
||||
if utf8_path is None:
|
||||
log.warn(enc_warn, path, 'utf-8')
|
||||
return False
|
||||
|
||||
try:
|
||||
# accept is either way checks out
|
||||
if os.path.exists(u_path) or os.path.exists(utf8_path):
|
||||
return True
|
||||
# this will catch any encode errors decoding u_path
|
||||
except UnicodeEncodeError:
|
||||
log.warn(enc_warn, path, sys.getfilesystemencoding())
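The new FileList here drops any path that cannot survive a round trip through the filesystem encoding and UTF-8, warning instead of crashing. A minimal standalone sketch of the same filtering idea; the helper name and sample call are illustrative, not the setuptools API:

    import os
    import sys

    def keep_utf8_paths(paths):
        """Return only the paths that can be represented in UTF-8.

        Paths that fail to decode/encode cleanly are skipped with a message,
        mirroring the spirit of FileList._safe_path above.
        """
        safe = []
        for path in paths:
            try:
                # bytes paths are decoded with the filesystem encoding first
                if isinstance(path, bytes):
                    path = path.decode(sys.getfilesystemencoding())
                path.encode('utf-8')
            except (UnicodeDecodeError, UnicodeEncodeError):
                print("skipping %r: not UTF-8 representable" % (path,))
                continue
            if os.path.exists(path):
                safe.append(path)
        return safe

    # keep_utf8_paths(['setup.py', b'data/\xff.bin'])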
|
||||
|
||||
|
||||
class manifest_maker(sdist):
|
||||
|
||||
template = "MANIFEST.in"
|
||||
|
||||
def initialize_options(self):
|
||||
|
|
@ -241,7 +272,7 @@ class manifest_maker(sdist):
|
|||
def run(self):
|
||||
self.filelist = FileList()
|
||||
if not os.path.exists(self.manifest):
|
||||
self.write_manifest() # it must exist so it'll get in the list
|
||||
self.write_manifest() # it must exist so it'll get in the list
|
||||
self.filelist.findall()
|
||||
self.add_defaults()
|
||||
if os.path.exists(self.template):
|
||||
|
|
@ -251,30 +282,23 @@ class manifest_maker(sdist):
|
|||
self.filelist.remove_duplicates()
|
||||
self.write_manifest()
|
||||
|
||||
def _manifest_normalize(self, path):
|
||||
path = unicode_utils.filesys_decode(path)
|
||||
return path.replace(os.sep, '/')
|
||||
|
||||
def write_manifest(self):
|
||||
"""Write the file list in 'self.filelist' (presumably as filled in
|
||||
by 'add_defaults()' and 'read_template()') to the manifest file
|
||||
"""
|
||||
Write the file list in 'self.filelist' to the manifest file
|
||||
named by 'self.manifest'.
|
||||
"""
|
||||
# The manifest must be UTF-8 encodable. See #303.
|
||||
if sys.version_info >= (3,):
|
||||
files = []
|
||||
for file in self.filelist.files:
|
||||
try:
|
||||
file.encode("utf-8")
|
||||
except UnicodeEncodeError:
|
||||
log.warn("'%s' not UTF-8 encodable -- skipping" % file)
|
||||
else:
|
||||
files.append(file)
|
||||
self.filelist.files = files
|
||||
self.filelist._repair()
|
||||
|
||||
files = self.filelist.files
|
||||
if os.sep!='/':
|
||||
files = [f.replace(os.sep,'/') for f in files]
|
||||
self.execute(write_file, (self.manifest, files),
|
||||
"writing manifest file '%s'" % self.manifest)
|
||||
# Now _repairs should encodability, but not unicode
|
||||
files = [self._manifest_normalize(f) for f in self.filelist.files]
|
||||
msg = "writing manifest file '%s'" % self.manifest
|
||||
self.execute(write_file, (self.manifest, files), msg)
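write_manifest above now funnels every entry through _manifest_normalize before writing, so the manifest always contains decoded text with '/' separators. The core of that normalization in isolation; the function name and sample entry are illustrative:

    import os

    def normalize_manifest_entry(path):
        # Manifest files use forward slashes regardless of the host platform.
        return path.replace(os.sep, '/')

    entries = [os.path.join('pkg', 'data', 'file.txt')]
    print([normalize_manifest_entry(e) for e in entries])   # ['pkg/data/file.txt']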
|
||||
|
||||
def warn(self, msg): # suppress missing-file warnings from sdist
|
||||
def warn(self, msg): # suppress missing-file warnings from sdist
|
||||
if not msg.startswith("standard file not found:"):
|
||||
sdist.warn(self, msg)
|
||||
|
||||
|
|
@ -296,7 +320,8 @@ class manifest_maker(sdist):
|
|||
self.filelist.exclude_pattern(None, prefix=build.build_base)
|
||||
self.filelist.exclude_pattern(None, prefix=base_dir)
|
||||
sep = re.escape(os.sep)
|
||||
self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
|
||||
self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
|
||||
is_regex=1)
|
||||
|
||||
|
||||
def write_file(filename, contents):
|
||||
|
|
@ -304,11 +329,13 @@ def write_file(filename, contents):
|
|||
sequence of strings without line terminators) to it.
|
||||
"""
|
||||
contents = "\n".join(contents)
|
||||
if sys.version_info >= (3,):
|
||||
contents = contents.encode("utf-8")
|
||||
f = open(filename, "wb") # always write POSIX-style manifest
|
||||
f.write(contents)
|
||||
f.close()
|
||||
|
||||
# assuming the contents has been vetted for utf-8 encoding
|
||||
contents = contents.encode("utf-8")
|
||||
|
||||
with open(filename, "wb") as f: # always write POSIX-style manifest
|
||||
f.write(contents)
|
||||
|
||||
|
||||
def write_pkg_info(cmd, basename, filename):
|
||||
log.info("writing %s", filename)
|
||||
|
|
@ -323,10 +350,12 @@ def write_pkg_info(cmd, basename, filename):
|
|||
finally:
|
||||
metadata.name, metadata.version = oldname, oldver
|
||||
|
||||
safe = getattr(cmd.distribution,'zip_safe',None)
|
||||
safe = getattr(cmd.distribution, 'zip_safe', None)
|
||||
from setuptools.command import bdist_egg
|
||||
|
||||
bdist_egg.write_safety_flag(cmd.egg_info, safe)
|
||||
|
||||
|
||||
def warn_depends_obsolete(cmd, basename, filename):
|
||||
if os.path.exists(filename):
|
||||
log.warn(
|
||||
|
|
@ -335,55 +364,69 @@ def warn_depends_obsolete(cmd, basename, filename):
|
|||
)
|
||||
|
||||
|
||||
def _write_requirements(stream, reqs):
|
||||
lines = yield_lines(reqs or ())
|
||||
append_cr = lambda line: line + '\n'
|
||||
lines = map(append_cr, lines)
|
||||
stream.writelines(lines)
|
||||
|
||||
|
||||
def write_requirements(cmd, basename, filename):
|
||||
dist = cmd.distribution
|
||||
data = ['\n'.join(yield_lines(dist.install_requires or ()))]
|
||||
for extra,reqs in (dist.extras_require or {}).items():
|
||||
data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
|
||||
cmd.write_or_delete_file("requirements", filename, ''.join(data))
|
||||
data = StringIO()
|
||||
_write_requirements(data, dist.install_requires)
|
||||
extras_require = dist.extras_require or {}
|
||||
for extra in sorted(extras_require):
|
||||
data.write('\n[{extra}]\n'.format(**vars()))
|
||||
_write_requirements(data, extras_require[extra])
|
||||
cmd.write_or_delete_file("requirements", filename, data.getvalue())
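The rewritten write_requirements builds requires.txt in memory: plain install requirements first, then one bracketed section per extra, sorted for stable output. A small self-contained sketch of that layout, using io.StringIO directly (Python 3; the real code goes through setuptools helpers) and made-up requirement data:

    from io import StringIO

    def render_requires(install_requires, extras_require):
        buf = StringIO()
        buf.writelines(line + '\n' for line in install_requires)
        for extra in sorted(extras_require):
            buf.write('\n[%s]\n' % extra)
            buf.writelines(line + '\n' for line in extras_require[extra])
        return buf.getvalue()

    print(render_requires(['requests>=2.3.0'], {'ssl': ['pyOpenSSL']}))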
|
||||
|
||||
|
||||
def write_toplevel_names(cmd, basename, filename):
|
||||
pkgs = dict.fromkeys(
|
||||
[
|
||||
k.split('.',1)[0]
|
||||
k.split('.', 1)[0]
|
||||
for k in cmd.distribution.iter_distribution_names()
|
||||
]
|
||||
)
|
||||
cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
|
||||
cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n')
|
||||
|
||||
|
||||
def overwrite_arg(cmd, basename, filename):
|
||||
write_arg(cmd, basename, filename, True)
|
||||
|
||||
|
||||
def write_arg(cmd, basename, filename, force=False):
|
||||
argname = os.path.splitext(basename)[0]
|
||||
value = getattr(cmd.distribution, argname, None)
|
||||
if value is not None:
|
||||
value = '\n'.join(value)+'\n'
|
||||
value = '\n'.join(value) + '\n'
|
||||
cmd.write_or_delete_file(argname, filename, value, force)
|
||||
|
||||
|
||||
def write_entries(cmd, basename, filename):
|
||||
ep = cmd.distribution.entry_points
|
||||
|
||||
if isinstance(ep,basestring) or ep is None:
|
||||
if isinstance(ep, basestring) or ep is None:
|
||||
data = ep
|
||||
elif ep is not None:
|
||||
data = []
|
||||
for section, contents in sorted(ep.items()):
|
||||
if not isinstance(contents,basestring):
|
||||
if not isinstance(contents, basestring):
|
||||
contents = EntryPoint.parse_group(section, contents)
|
||||
contents = '\n'.join(sorted(map(str,contents.values())))
|
||||
data.append('[%s]\n%s\n\n' % (section,contents))
|
||||
contents = '\n'.join(sorted(map(str, contents.values())))
|
||||
data.append('[%s]\n%s\n\n' % (section, contents))
|
||||
data = ''.join(data)
|
||||
|
||||
cmd.write_or_delete_file('entry points', filename, data, True)
|
||||
|
||||
|
||||
def get_pkg_info_revision():
|
||||
# See if we can get a -r### off of PKG-INFO, in case this is an sdist of
|
||||
# a subversion revision
|
||||
#
|
||||
if os.path.exists('PKG-INFO'):
|
||||
f = open('PKG-INFO','rU')
|
||||
f = open('PKG-INFO', 'rU')
|
||||
for line in f:
|
||||
match = re.match(r"Version:.*-r(\d+)\s*$", line)
|
||||
if match:
|
||||
|
|
|
|||
|
|
@ -1,22 +1,24 @@
|
|||
import setuptools
|
||||
from distutils.errors import DistutilsArgError
|
||||
import inspect
|
||||
import glob
|
||||
import warnings
|
||||
import platform
|
||||
import distutils.command.install as orig
|
||||
from distutils.errors import DistutilsArgError
|
||||
|
||||
import setuptools
|
||||
|
||||
# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
|
||||
# now. See https://bitbucket.org/pypa/setuptools/issue/199/
|
||||
# now. See https://bitbucket.org/pypa/setuptools/issue/199/
|
||||
_install = orig.install
|
||||
|
||||
|
||||
class install(orig.install):
|
||||
"""Use easy_install to install the package, w/dependencies"""
|
||||
|
||||
user_options = orig.install.user_options + [
|
||||
('old-and-unmanageable', None, "Try not to use this!"),
|
||||
('single-version-externally-managed', None,
|
||||
"used by system package builders to create 'flat' eggs"),
|
||||
"used by system package builders to create 'flat' eggs"),
|
||||
]
|
||||
boolean_options = orig.install.boolean_options + [
|
||||
'old-and-unmanageable', 'single-version-externally-managed',
|
||||
|
|
@ -115,7 +117,9 @@ class install(orig.install):
|
|||
cmd.run()
|
||||
setuptools.bootstrap_install_from = None
|
||||
|
||||
|
||||
# XXX Python 3.1 doesn't see _nc if this is inside the class
|
||||
install.sub_commands = [
|
||||
cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
|
||||
] + install.new_commands
|
||||
install.sub_commands = (
|
||||
[cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
|
||||
install.new_commands
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,10 @@
|
|||
from distutils import log, dir_util
|
||||
import os
|
||||
|
||||
from setuptools import Command
|
||||
from setuptools.archive_util import unpack_archive
|
||||
from distutils import log, dir_util
|
||||
import os, pkg_resources
|
||||
import pkg_resources
|
||||
|
||||
|
||||
class install_egg_info(Command):
|
||||
"""Install an .egg-info directory for the package"""
|
||||
|
|
@ -16,26 +19,26 @@ class install_egg_info(Command):
|
|||
self.install_dir = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('install_lib',('install_dir','install_dir'))
|
||||
self.set_undefined_options('install_lib',
|
||||
('install_dir', 'install_dir'))
|
||||
ei_cmd = self.get_finalized_command("egg_info")
|
||||
basename = pkg_resources.Distribution(
|
||||
None, None, ei_cmd.egg_name, ei_cmd.egg_version
|
||||
).egg_name()+'.egg-info'
|
||||
).egg_name() + '.egg-info'
|
||||
self.source = ei_cmd.egg_info
|
||||
self.target = os.path.join(self.install_dir, basename)
|
||||
self.outputs = [self.target]
|
||||
|
||||
def run(self):
|
||||
self.run_command('egg_info')
|
||||
target = self.target
|
||||
if os.path.isdir(self.target) and not os.path.islink(self.target):
|
||||
dir_util.remove_tree(self.target, dry_run=self.dry_run)
|
||||
elif os.path.exists(self.target):
|
||||
self.execute(os.unlink,(self.target,),"Removing "+self.target)
|
||||
self.execute(os.unlink, (self.target,), "Removing " + self.target)
|
||||
if not self.dry_run:
|
||||
pkg_resources.ensure_directory(self.target)
|
||||
self.execute(self.copytree, (),
|
||||
"Copying %s to %s" % (self.source, self.target)
|
||||
self.execute(
|
||||
self.copytree, (), "Copying %s to %s" % (self.source, self.target)
|
||||
)
|
||||
self.install_namespaces()
|
||||
|
||||
|
|
@ -44,82 +47,70 @@ class install_egg_info(Command):
|
|||
|
||||
def copytree(self):
|
||||
# Copy the .egg-info tree to site-packages
|
||||
def skimmer(src,dst):
|
||||
def skimmer(src, dst):
|
||||
# filter out source-control directories; note that 'src' is always
|
||||
# a '/'-separated path, regardless of platform. 'dst' is a
|
||||
# platform-specific path.
|
||||
for skip in '.svn/','CVS/':
|
||||
if src.startswith(skip) or '/'+skip in src:
|
||||
for skip in '.svn/', 'CVS/':
|
||||
if src.startswith(skip) or '/' + skip in src:
|
||||
return None
|
||||
self.outputs.append(dst)
|
||||
log.debug("Copying %s to %s", src, dst)
|
||||
return dst
|
||||
|
||||
unpack_archive(self.source, self.target, skimmer)
|
||||
def install_namespaces(self):
|
||||
nsp = self._get_all_ns_packages()
|
||||
if not nsp: return
|
||||
filename,ext = os.path.splitext(self.target)
|
||||
filename += '-nspkg.pth'; self.outputs.append(filename)
|
||||
log.info("Installing %s",filename)
|
||||
if not self.dry_run:
|
||||
f = open(filename,'wt')
|
||||
for pkg in nsp:
|
||||
# ensure pkg is not a unicode string under Python 2.7
|
||||
pkg = str(pkg)
|
||||
pth = tuple(pkg.split('.'))
|
||||
trailer = '\n'
|
||||
if '.' in pkg:
|
||||
trailer = (
|
||||
"; m and setattr(sys.modules[%r], %r, m)\n"
|
||||
% ('.'.join(pth[:-1]), pth[-1])
|
||||
)
|
||||
f.write(
|
||||
"import sys,types,os; "
|
||||
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
|
||||
"*%(pth)r); "
|
||||
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
|
||||
"m = not ie and "
|
||||
"sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
|
||||
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
|
||||
"(p not in mp) and mp.append(p)%(trailer)s"
|
||||
% locals()
|
||||
)
|
||||
f.close()
|
||||
if not nsp:
|
||||
return
|
||||
filename, ext = os.path.splitext(self.target)
|
||||
filename += '-nspkg.pth'
|
||||
self.outputs.append(filename)
|
||||
log.info("Installing %s", filename)
|
||||
lines = map(self._gen_nspkg_line, nsp)
|
||||
|
||||
if self.dry_run:
|
||||
# always generate the lines, even in dry run
|
||||
list(lines)
|
||||
return
|
||||
|
||||
with open(filename, 'wt') as f:
|
||||
f.writelines(lines)
|
||||
|
||||
_nspkg_tmpl = (
|
||||
"import sys, types, os",
|
||||
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
|
||||
"ie = os.path.exists(os.path.join(p,'__init__.py'))",
|
||||
"m = not ie and "
|
||||
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
|
||||
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
|
||||
"(p not in mp) and mp.append(p)",
|
||||
)
|
||||
"lines for the namespace installer"
|
||||
|
||||
_nspkg_tmpl_multi = (
|
||||
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
|
||||
)
|
||||
"additional line(s) when a parent package is indicated"
|
||||
|
||||
@classmethod
|
||||
def _gen_nspkg_line(cls, pkg):
|
||||
# ensure pkg is not a unicode string under Python 2.7
|
||||
pkg = str(pkg)
|
||||
pth = tuple(pkg.split('.'))
|
||||
tmpl_lines = cls._nspkg_tmpl
|
||||
parent, sep, child = pkg.rpartition('.')
|
||||
if parent:
|
||||
tmpl_lines += cls._nspkg_tmpl_multi
|
||||
return ';'.join(tmpl_lines) % locals() + '\n'
|
||||
|
||||
def _get_all_ns_packages(self):
|
||||
nsp = {}
|
||||
"""Return sorted list of all package namespaces"""
|
||||
nsp = set()
|
||||
for pkg in self.distribution.namespace_packages or []:
|
||||
pkg = pkg.split('.')
|
||||
while pkg:
|
||||
nsp['.'.join(pkg)] = 1
|
||||
nsp.add('.'.join(pkg))
|
||||
pkg.pop()
|
||||
nsp=list(nsp)
|
||||
nsp.sort() # set up shorter names first
|
||||
return nsp
|
||||
|
||||
|
||||
return sorted(nsp)
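install_namespaces is now template driven: _gen_nspkg_line joins the _nspkg_tmpl statements with ';', interpolates the package name, and appends the setattr clause only when the package has a parent. A rough illustration of the kind of one-line .pth entry this produces, using a simplified template that omits the __init__.py existence check from the real one:

    def gen_pth_line(pkg):
        # Simplified: one ';'-joined statement block per namespace package.
        pth = tuple(pkg.split('.'))
        tmpl = [
            "import sys, types, os",
            "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
            "m = sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
            "mp = m.__dict__.setdefault('__path__', [])",
            "(p not in mp) and mp.append(p)",
        ]
        parent, _, child = pkg.rpartition('.')
        if parent:
            tmpl.append("setattr(sys.modules[%(parent)r], %(child)r, m)")
        return ';'.join(tmpl) % locals() + '\n'

    print(gen_pth_line('zope.interface'))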
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import distutils.command.install_lib as orig
|
||||
import os
|
||||
|
||||
|
||||
class install_lib(orig.install_lib):
|
||||
"""Don't add compiled flags to filenames of non-Python files"""
|
||||
|
||||
|
|
@ -15,20 +16,20 @@ class install_lib(orig.install_lib):
|
|||
exclude = {}
|
||||
nsp = self.distribution.namespace_packages
|
||||
svem = (nsp and self.get_finalized_command('install')
|
||||
.single_version_externally_managed)
|
||||
.single_version_externally_managed)
|
||||
if svem:
|
||||
for pkg in nsp:
|
||||
parts = pkg.split('.')
|
||||
while parts:
|
||||
pkgdir = os.path.join(self.install_dir, *parts)
|
||||
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
|
||||
exclude[os.path.join(pkgdir,f)] = 1
|
||||
exclude[os.path.join(pkgdir, f)] = 1
|
||||
parts.pop()
|
||||
return exclude
|
||||
|
||||
def copy_tree(
|
||||
self, infile, outfile,
|
||||
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
|
||||
self, infile, outfile,
|
||||
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
|
||||
):
|
||||
assert preserve_mode and preserve_times and not preserve_symlinks
|
||||
exclude = self.get_exclusions()
|
||||
|
|
@ -45,7 +46,8 @@ class install_lib(orig.install_lib):
|
|||
|
||||
def pf(src, dst):
|
||||
if dst in exclude:
|
||||
log.warn("Skipping installation of %s (namespace package)",dst)
|
||||
log.warn("Skipping installation of %s (namespace package)",
|
||||
dst)
|
||||
return False
|
||||
|
||||
log.info("copying %s -> %s", src, os.path.dirname(dst))
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
import distutils.command.install_scripts as orig
|
||||
from pkg_resources import Distribution, PathMetadata, ensure_directory
|
||||
import os
|
||||
from distutils import log
|
||||
import distutils.command.install_scripts as orig
|
||||
import os
|
||||
|
||||
from pkg_resources import Distribution, PathMetadata, ensure_directory
|
||||
|
||||
|
||||
class install_scripts(orig.install_scripts):
|
||||
"""Do normal script install, plus any egg_info wrapper scripts"""
|
||||
|
|
@ -29,7 +31,7 @@ class install_scripts(orig.install_scripts):
|
|||
ei_cmd.egg_name, ei_cmd.egg_version,
|
||||
)
|
||||
bs_cmd = self.get_finalized_command('build_scripts')
|
||||
executable = getattr(bs_cmd,'executable',sys_executable)
|
||||
executable = getattr(bs_cmd, 'executable', sys_executable)
|
||||
is_wininst = getattr(
|
||||
self.get_finalized_command("bdist_wininst"), '_is_running', False
|
||||
)
|
||||
|
|
@ -39,6 +41,7 @@ class install_scripts(orig.install_scripts):
|
|||
def write_script(self, script_name, contents, mode="t", *ignored):
|
||||
"""Write an executable file to the scripts directory"""
|
||||
from setuptools.command.easy_install import chmod, current_umask
|
||||
|
||||
log.info("Installing %s script to %s", script_name, self.install_dir)
|
||||
target = os.path.join(self.install_dir, script_name)
|
||||
self.outfiles.append(target)
|
||||
|
|
@ -46,7 +49,7 @@ class install_scripts(orig.install_scripts):
|
|||
mask = current_umask()
|
||||
if not self.dry_run:
|
||||
ensure_directory(target)
|
||||
f = open(target,"w"+mode)
|
||||
f = open(target, "w" + mode)
|
||||
f.write(contents)
|
||||
f.close()
|
||||
chmod(target, 0x1FF-mask) # 0777
|
||||
chmod(target, 0o777 - mask)
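write_script now uses the octal literal 0o777 minus the current umask rather than the hex 0x1FF constant; the resulting mode is unchanged. A quick sketch of that arithmetic with only the standard library (current_umask is re-derived here, not imported from setuptools):

    import os

    def current_umask():
        # os.umask sets a new mask and returns the old one, so restore it.
        tmp = os.umask(0o022)
        os.umask(tmp)
        return tmp

    mask = current_umask()
    print(oct(0o777 - mask))   # 0o755 when the umask is 0o022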
|
||||
|
|
|
|||
|
|
@ -1,15 +1,15 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity version="1.0.0.0"
|
||||
processorArchitecture="X86"
|
||||
name="%(name)s"
|
||||
type="win32"/>
|
||||
<assemblyIdentity version="1.0.0.0"
|
||||
processorArchitecture="X86"
|
||||
name="%(name)s"
|
||||
type="win32"/>
|
||||
<!-- Identify the application security requirements. -->
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
</assembly>
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import distutils.command.register as orig
|
||||
|
||||
|
||||
class register(orig.register):
|
||||
__doc__ = orig.register.__doc__
|
||||
|
||||
|
|
|
|||
|
|
@ -1,18 +1,20 @@
|
|||
import os
|
||||
from setuptools import Command
|
||||
from setuptools.compat import basestring
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import os
|
||||
|
||||
from setuptools import Command
|
||||
from setuptools.compat import basestring
|
||||
|
||||
|
||||
class rotate(Command):
|
||||
"""Delete older distributions"""
|
||||
|
||||
description = "delete older distributions, keeping N newest files"
|
||||
user_options = [
|
||||
('match=', 'm', "patterns to match (required)"),
|
||||
('match=', 'm', "patterns to match (required)"),
|
||||
('dist-dir=', 'd', "directory where the distributions are"),
|
||||
('keep=', 'k', "number of matching distributions to keep"),
|
||||
('keep=', 'k', "number of matching distributions to keep"),
|
||||
]
|
||||
|
||||
boolean_options = []
|
||||
|
|
@ -38,21 +40,22 @@ class rotate(Command):
|
|||
self.match = [
|
||||
convert_path(p.strip()) for p in self.match.split(',')
|
||||
]
|
||||
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
|
||||
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
||||
|
||||
def run(self):
|
||||
self.run_command("egg_info")
|
||||
from glob import glob
|
||||
|
||||
for pattern in self.match:
|
||||
pattern = self.distribution.get_name()+'*'+pattern
|
||||
files = glob(os.path.join(self.dist_dir,pattern))
|
||||
files = [(os.path.getmtime(f),f) for f in files]
|
||||
pattern = self.distribution.get_name() + '*' + pattern
|
||||
files = glob(os.path.join(self.dist_dir, pattern))
|
||||
files = [(os.path.getmtime(f), f) for f in files]
|
||||
files.sort()
|
||||
files.reverse()
|
||||
|
||||
log.info("%d file(s) matching %s", len(files), pattern)
|
||||
files = files[self.keep:]
|
||||
for (t,f) in files:
|
||||
for (t, f) in files:
|
||||
log.info("Deleting %s", f)
|
||||
if not self.dry_run:
|
||||
os.unlink(f)
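rotate.run sorts the matched distributions newest-first by modification time and removes everything beyond the configured keep count. The same pruning logic as a standalone sketch; the function name, pattern, and dry_run default are illustrative:

    import glob
    import os

    def prune_old(pattern, keep, dry_run=True):
        # Newest first by mtime; everything past `keep` is considered stale.
        files = sorted(glob.glob(pattern), key=os.path.getmtime, reverse=True)
        for stale in files[keep:]:
            print('%s %s' % ('would delete' if dry_run else 'deleting', stale))
            if not dry_run:
                os.unlink(stale)

    prune_old('dist/mypkg-*.tar.gz', keep=3)   # dry run by default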
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import distutils, os
|
||||
from setuptools import Command
|
||||
from setuptools.command.setopt import edit_config, option_base
|
||||
|
||||
|
||||
class saveopts(option_base):
|
||||
"""Save command-line options to a file"""
|
||||
|
||||
|
|
@ -13,12 +12,11 @@ class saveopts(option_base):
|
|||
|
||||
for cmd in dist.command_options:
|
||||
|
||||
if cmd=='saveopts':
|
||||
continue # don't save our own options!
|
||||
if cmd == 'saveopts':
|
||||
continue # don't save our own options!
|
||||
|
||||
for opt,(src,val) in dist.get_option_dict(cmd).items():
|
||||
if src=="command line":
|
||||
settings.setdefault(cmd,{})[opt] = val
|
||||
for opt, (src, val) in dist.get_option_dict(cmd).items():
|
||||
if src == "command line":
|
||||
settings.setdefault(cmd, {})[opt] = val
|
||||
|
||||
edit_config(self.filename, settings, self.dry_run)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,13 +1,14 @@
|
|||
from glob import glob
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.command.sdist as orig
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
|
||||
import pkg_resources
|
||||
import distutils.command.sdist as orig
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from setuptools import svn_utils
|
||||
from setuptools.compat import PY3
|
||||
import pkg_resources
|
||||
|
||||
READMES = ('README', 'README.rst', 'README.txt')
|
||||
|
||||
|
|
@ -19,7 +20,7 @@ def walk_revctrl(dirname=''):
|
|||
yield item
|
||||
|
||||
|
||||
#TODO will need test case
|
||||
# TODO will need test case
|
||||
class re_finder(object):
|
||||
"""
|
||||
Finder that locates files based on entries in a file matched by a
|
||||
|
|
@ -32,7 +33,7 @@ class re_finder(object):
|
|||
self.entries_path = convert_path(path)
|
||||
|
||||
def _finder(self, dirname, filename):
|
||||
f = open(filename,'rU')
|
||||
f = open(filename, 'rU')
|
||||
try:
|
||||
data = f.read()
|
||||
finally:
|
||||
|
|
@ -50,12 +51,13 @@ class re_finder(object):
|
|||
if not os.path.isfile(path):
|
||||
# entries file doesn't exist
|
||||
return
|
||||
for path in self._finder(dirname,path):
|
||||
for path in self._finder(dirname, path):
|
||||
if os.path.isfile(path):
|
||||
yield path
|
||||
elif os.path.isdir(path):
|
||||
for item in self.find(path):
|
||||
yield item
|
||||
|
||||
__call__ = find
|
||||
|
||||
|
||||
|
|
@ -84,7 +86,7 @@ class sdist(orig.sdist):
|
|||
('dist-dir=', 'd',
|
||||
"directory to put the source distribution archive(s) in "
|
||||
"[default: dist]"),
|
||||
]
|
||||
]
|
||||
|
||||
negative_opt = {}
|
||||
|
||||
|
|
@ -92,7 +94,7 @@ class sdist(orig.sdist):
|
|||
self.run_command('egg_info')
|
||||
ei_cmd = self.get_finalized_command('egg_info')
|
||||
self.filelist = ei_cmd.filelist
|
||||
self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
|
||||
self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
|
||||
self.check_readme()
|
||||
|
||||
# Run sub commands
|
||||
|
|
@ -102,12 +104,13 @@ class sdist(orig.sdist):
|
|||
# Call check_metadata only if no 'check' command
|
||||
# (distutils <= 2.6)
|
||||
import distutils.command
|
||||
|
||||
if 'check' not in distutils.command.__all__:
|
||||
self.check_metadata()
|
||||
|
||||
self.make_distribution()
|
||||
|
||||
dist_files = getattr(self.distribution,'dist_files',[])
|
||||
dist_files = getattr(self.distribution, 'dist_files', [])
|
||||
for file in self.archive_files:
|
||||
data = ('sdist', '', file)
|
||||
if data not in dist_files:
|
||||
|
|
@ -123,13 +126,14 @@ class sdist(orig.sdist):
|
|||
except:
|
||||
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
|
||||
raise
|
||||
|
||||
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
|
||||
# has been fixed, so only override the method if we're using an earlier
|
||||
# Python.
|
||||
has_leaky_handle = (
|
||||
sys.version_info < (2,7,2)
|
||||
or (3,0) <= sys.version_info < (3,1,4)
|
||||
or (3,2) <= sys.version_info < (3,2,1)
|
||||
sys.version_info < (2, 7, 2)
|
||||
or (3, 0) <= sys.version_info < (3, 1, 4)
|
||||
or (3, 2) <= sys.version_info < (3, 2, 1)
|
||||
)
|
||||
if has_leaky_handle:
|
||||
read_template = __read_template_hack
|
||||
|
|
@ -193,7 +197,8 @@ class sdist(orig.sdist):
|
|||
return
|
||||
else:
|
||||
self.warn(
|
||||
"standard file not found: should have one of " +', '.join(READMES)
|
||||
"standard file not found: should have one of " +
|
||||
', '.join(READMES)
|
||||
)
|
||||
|
||||
def make_release_tree(self, base_dir, files):
|
||||
|
|
@ -201,7 +206,7 @@ class sdist(orig.sdist):
|
|||
|
||||
# Save any egg_info command line options used to create this sdist
|
||||
dest = os.path.join(base_dir, 'setup.cfg')
|
||||
if hasattr(os,'link') and os.path.exists(dest):
|
||||
if hasattr(os, 'link') and os.path.exists(dest):
|
||||
# unlink and re-copy, since it might be hard-linked, and
|
||||
# we don't want to change the source version
|
||||
os.unlink(dest)
|
||||
|
|
@ -219,7 +224,8 @@ class sdist(orig.sdist):
|
|||
first_line = fp.readline()
|
||||
finally:
|
||||
fp.close()
|
||||
return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
|
||||
return (first_line !=
|
||||
'# file GENERATED by distutils, do NOT edit\n'.encode())
|
||||
|
||||
def read_manifest(self):
|
||||
"""Read the manifest file (named by 'self.manifest') and use it to
|
||||
|
|
@ -230,7 +236,7 @@ class sdist(orig.sdist):
|
|||
manifest = open(self.manifest, 'rbU')
|
||||
for line in manifest:
|
||||
# The manifest must contain UTF-8. See #303.
|
||||
if sys.version_info >= (3,):
|
||||
if PY3:
|
||||
try:
|
||||
line = line.decode('UTF-8')
|
||||
except UnicodeDecodeError:
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
import os
|
||||
import distutils
|
||||
from setuptools import Command
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
import distutils
|
||||
import os
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
|
||||
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
|
||||
|
||||
|
|
@ -13,19 +15,20 @@ def config_file(kind="local"):
|
|||
|
||||
`kind` must be one of "local", "global", or "user"
|
||||
"""
|
||||
if kind=='local':
|
||||
if kind == 'local':
|
||||
return 'setup.cfg'
|
||||
if kind=='global':
|
||||
if kind == 'global':
|
||||
return os.path.join(
|
||||
os.path.dirname(distutils.__file__),'distutils.cfg'
|
||||
os.path.dirname(distutils.__file__), 'distutils.cfg'
|
||||
)
|
||||
if kind=='user':
|
||||
dot = os.name=='posix' and '.' or ''
|
||||
if kind == 'user':
|
||||
dot = os.name == 'posix' and '.' or ''
|
||||
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
|
||||
raise ValueError(
|
||||
"config_file() type must be 'local', 'global', or 'user'", kind
|
||||
)
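config_file() resolves the three scopes to concrete paths: 'local' is setup.cfg, 'global' sits next to the distutils package, and 'user' is ~/.pydistutils.cfg on POSIX (pydistutils.cfg elsewhere). A quick way to see the resolved paths on a given machine:

    from setuptools.command.setopt import config_file

    for kind in ('local', 'global', 'user'):
        print('%s -> %s' % (kind, config_file(kind)))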
|
||||
|
||||
|
||||
def edit_config(filename, settings, dry_run=False):
|
||||
"""Edit a configuration file to include `settings`
|
||||
|
||||
|
|
@ -35,6 +38,7 @@ def edit_config(filename, settings, dry_run=False):
|
|||
A setting of ``None`` means to delete that setting.
|
||||
"""
|
||||
from setuptools.compat import ConfigParser
|
||||
|
||||
log.debug("Reading configuration from %s", filename)
|
||||
opts = ConfigParser.RawConfigParser()
|
||||
opts.read([filename])
|
||||
|
|
@ -46,39 +50,40 @@ def edit_config(filename, settings, dry_run=False):
|
|||
if not opts.has_section(section):
|
||||
log.debug("Adding new section [%s] to %s", section, filename)
|
||||
opts.add_section(section)
|
||||
for option,value in options.items():
|
||||
for option, value in options.items():
|
||||
if value is None:
|
||||
log.debug(
|
||||
"Deleting %s.%s from %s",
|
||||
section, option, filename
|
||||
)
|
||||
opts.remove_option(section,option)
|
||||
opts.remove_option(section, option)
|
||||
if not opts.options(section):
|
||||
log.info("Deleting empty [%s] section from %s",
|
||||
section, filename)
|
||||
section, filename)
|
||||
opts.remove_section(section)
|
||||
else:
|
||||
log.debug(
|
||||
"Setting %s.%s to %r in %s",
|
||||
section, option, value, filename
|
||||
)
|
||||
opts.set(section,option,value)
|
||||
opts.set(section, option, value)
|
||||
|
||||
log.info("Writing %s", filename)
|
||||
if not dry_run:
|
||||
with open(filename, 'w') as f:
|
||||
opts.write(f)
|
||||
|
||||
|
||||
class option_base(Command):
|
||||
"""Abstract base class for commands that mess with config files"""
|
||||
|
||||
user_options = [
|
||||
('global-config', 'g',
|
||||
"save options to the site-wide distutils.cfg file"),
|
||||
"save options to the site-wide distutils.cfg file"),
|
||||
('user-config', 'u',
|
||||
"save options to the current user's pydistutils.cfg file"),
|
||||
"save options to the current user's pydistutils.cfg file"),
|
||||
('filename=', 'f',
|
||||
"configuration file to use (default=setup.cfg)"),
|
||||
"configuration file to use (default=setup.cfg)"),
|
||||
]
|
||||
|
||||
boolean_options = [
|
||||
|
|
@ -100,7 +105,7 @@ class option_base(Command):
|
|||
filenames.append(self.filename)
|
||||
if not filenames:
|
||||
filenames.append(config_file('local'))
|
||||
if len(filenames)>1:
|
||||
if len(filenames) > 1:
|
||||
raise DistutilsOptionError(
|
||||
"Must specify only one configuration file option",
|
||||
filenames
|
||||
|
|
@ -115,9 +120,9 @@ class setopt(option_base):
|
|||
|
||||
user_options = [
|
||||
('command=', 'c', 'command to set an option for'),
|
||||
('option=', 'o', 'option to set'),
|
||||
('set-value=', 's', 'value of the option'),
|
||||
('remove', 'r', 'remove (unset) the value'),
|
||||
('option=', 'o', 'option to set'),
|
||||
('set-value=', 's', 'value of the option'),
|
||||
('remove', 'r', 'remove (unset) the value'),
|
||||
] + option_base.user_options
|
||||
|
||||
boolean_options = option_base.boolean_options + ['remove']
|
||||
|
|
@ -139,7 +144,7 @@ class setopt(option_base):
|
|||
def run(self):
|
||||
edit_config(
|
||||
self.filename, {
|
||||
self.command: {self.option.replace('-','_'):self.set_value}
|
||||
self.command: {self.option.replace('-', '_'): self.set_value}
|
||||
},
|
||||
self.dry_run
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,18 +1,17 @@
|
|||
import unittest
|
||||
from unittest import TestLoader
|
||||
|
||||
from setuptools import Command
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from unittest import TestLoader
|
||||
import unittest
|
||||
import sys
|
||||
from pkg_resources import (resource_listdir, resource_exists,
|
||||
normalize_path, working_set, _namespace_packages, add_activation_listener,
|
||||
require, EntryPoint)
|
||||
|
||||
from pkg_resources import (resource_listdir, resource_exists, normalize_path,
|
||||
working_set, _namespace_packages,
|
||||
add_activation_listener, require, EntryPoint)
|
||||
from setuptools import Command
|
||||
from setuptools.compat import PY3
|
||||
from setuptools.py31compat import unittest_main
|
||||
|
||||
|
||||
class ScanningLoader(TestLoader):
|
||||
|
||||
def loadTestsFromModule(self, module):
|
||||
"""Return a suite of all tests cases contained in the given module
|
||||
|
||||
|
|
@ -21,8 +20,7 @@ class ScanningLoader(TestLoader):
|
|||
the return value to the tests.
|
||||
"""
|
||||
tests = []
|
||||
if module.__name__ != 'setuptools.tests.doctest': # ugh
|
||||
tests.append(TestLoader.loadTestsFromModule(self, module))
|
||||
tests.append(TestLoader.loadTestsFromModule(self, module))
|
||||
|
||||
if hasattr(module, "additional_tests"):
|
||||
tests.append(module.additional_tests())
|
||||
|
|
@ -33,7 +31,7 @@ class ScanningLoader(TestLoader):
|
|||
submodule = module.__name__ + '.' + file[:-3]
|
||||
else:
|
||||
if resource_exists(module.__name__, file + '/__init__.py'):
|
||||
submodule = module.__name__+'.'+file
|
||||
submodule = module.__name__ + '.' + file
|
||||
else:
|
||||
continue
|
||||
tests.append(self.loadTestsFromName(submodule))
|
||||
|
|
@ -41,19 +39,18 @@ class ScanningLoader(TestLoader):
|
|||
if len(tests) != 1:
|
||||
return self.suiteClass(tests)
|
||||
else:
|
||||
return tests[0] # don't create a nested suite for only one return
|
||||
return tests[0] # don't create a nested suite for only one return
|
||||
|
||||
|
||||
class test(Command):
|
||||
|
||||
"""Command to run unit tests after in-place build"""
|
||||
|
||||
description = "run unit tests after in-place build"
|
||||
|
||||
user_options = [
|
||||
('test-module=','m', "Run 'test_suite' in specified module"),
|
||||
('test-suite=','s',
|
||||
"Test suite to run (e.g. 'some_module.test_suite')"),
|
||||
('test-module=', 'm', "Run 'test_suite' in specified module"),
|
||||
('test-suite=', 's',
|
||||
"Test suite to run (e.g. 'some_module.test_suite')"),
|
||||
('test-runner=', 'r', "Test runner to use"),
|
||||
]
|
||||
|
||||
|
|
@ -78,7 +75,7 @@ class test(Command):
|
|||
self.test_args = [self.test_suite]
|
||||
|
||||
if self.verbose:
|
||||
self.test_args.insert(0,'--verbose')
|
||||
self.test_args.insert(0, '--verbose')
|
||||
if self.test_loader is None:
|
||||
self.test_loader = getattr(self.distribution, 'test_loader', None)
|
||||
if self.test_loader is None:
|
||||
|
|
@ -87,10 +84,8 @@ class test(Command):
|
|||
self.test_runner = getattr(self.distribution, 'test_runner', None)
|
||||
|
||||
def with_project_on_sys_path(self, func):
|
||||
with_2to3 = (
|
||||
sys.version_info >= (3,)
|
||||
and getattr(self.distribution, 'use_2to3', False)
|
||||
)
|
||||
with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
|
||||
|
||||
if with_2to3:
|
||||
# If we run 2to3 we can not do this inplace:
|
||||
|
||||
|
|
@ -133,7 +128,8 @@ class test(Command):
|
|||
|
||||
def run(self):
|
||||
if self.distribution.install_requires:
|
||||
self.distribution.fetch_build_eggs(self.distribution.install_requires)
|
||||
self.distribution.fetch_build_eggs(
|
||||
self.distribution.install_requires)
|
||||
if self.distribution.tests_require:
|
||||
self.distribution.fetch_build_eggs(self.distribution.tests_require)
|
||||
|
||||
|
|
@ -149,7 +145,7 @@ class test(Command):
|
|||
# Purge modules under test from sys.modules. The test loader will
|
||||
# re-import them from the build location. Required when 2to3 is used
|
||||
# with namespace packages.
|
||||
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
|
||||
if PY3 and getattr(self.distribution, 'use_2to3', False):
|
||||
module = self.test_args[-1].split('.')[0]
|
||||
if module in _namespace_packages:
|
||||
del_modules = []
|
||||
|
|
@ -162,7 +158,7 @@ class test(Command):
|
|||
list(map(sys.modules.__delitem__, del_modules))
|
||||
|
||||
unittest_main(
|
||||
None, None, [unittest.__file__]+self.test_args,
|
||||
None, None, [unittest.__file__] + self.test_args,
|
||||
testLoader=self._resolve_as_ep(self.test_loader),
|
||||
testRunner=self._resolve_as_ep(self.test_runner),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -5,6 +5,10 @@ Implements a Distutils 'upload_docs' subcommand (upload documentation to
|
|||
PyPI's pythonhosted.org).
|
||||
"""
|
||||
|
||||
from base64 import standard_b64encode
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.command.upload import upload
|
||||
import os
|
||||
import socket
|
||||
import zipfile
|
||||
|
|
@ -12,14 +16,9 @@ import tempfile
|
|||
import sys
|
||||
import shutil
|
||||
|
||||
from base64 import standard_b64encode
|
||||
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
|
||||
from pkg_resources import iter_entry_points
|
||||
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.command.upload import upload
|
||||
|
||||
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
|
||||
|
||||
errors = 'surrogateescape' if PY3 else 'strict'
|
||||
|
||||
|
|
@ -33,7 +32,6 @@ def b(s, encoding='utf-8'):
|
|||
|
||||
|
||||
class upload_docs(upload):
|
||||
|
||||
description = 'Upload documentation to PyPI'
|
||||
|
||||
user_options = [
|
||||
|
|
@ -42,7 +40,7 @@ class upload_docs(upload):
|
|||
('show-response', None,
|
||||
'display full response text from server'),
|
||||
('upload-dir=', None, 'directory to upload'),
|
||||
]
|
||||
]
|
||||
boolean_options = upload.boolean_options
|
||||
|
||||
def has_sphinx(self):
|
||||
|
|
@ -159,7 +157,7 @@ class upload_docs(upload):
|
|||
elif schema == 'https':
|
||||
conn = httplib.HTTPSConnection(netloc)
|
||||
else:
|
||||
raise AssertionError("unsupported schema "+schema)
|
||||
raise AssertionError("unsupported schema " + schema)
|
||||
|
||||
data = ''
|
||||
try:
|
||||
|
|
@ -190,4 +188,4 @@ class upload_docs(upload):
|
|||
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
|
||||
log.ERROR)
|
||||
if self.show_response:
|
||||
print('-'*75, r.read(), '-'*75)
|
||||
print('-' * 75, r.read(), '-' * 75)
|
||||
|
|
|
|||
|
|
@ -1,15 +1,15 @@
|
|||
import sys
|
||||
import itertools
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
PY3 = False
|
||||
PY3 = sys.version_info >= (3,)
|
||||
PY2 = not PY3
|
||||
|
||||
if PY2:
|
||||
basestring = basestring
|
||||
import __builtin__ as builtins
|
||||
import ConfigParser
|
||||
from StringIO import StringIO
|
||||
BytesIO = StringIO
|
||||
execfile = execfile
|
||||
func_code = lambda o: o.func_code
|
||||
func_globals = lambda o: o.func_globals
|
||||
im_func = lambda o: o.im_func
|
||||
|
|
@ -21,8 +21,6 @@ if sys.version_info[0] < 3:
|
|||
iteritems = lambda o: o.iteritems()
|
||||
long_type = long
|
||||
maxsize = sys.maxint
|
||||
next = lambda o: o.next()
|
||||
numeric_types = (int, long, float)
|
||||
unichr = unichr
|
||||
unicode = unicode
|
||||
bytes = str
|
||||
|
|
@ -34,9 +32,8 @@ if sys.version_info[0] < 3:
|
|||
|
||||
exec("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb""")
|
||||
else:
|
||||
PY3 = True
|
||||
|
||||
if PY3:
|
||||
basestring = str
|
||||
import builtins
|
||||
import configparser as ConfigParser
|
||||
|
|
@ -51,8 +48,6 @@ else:
|
|||
iteritems = lambda o: o.items()
|
||||
long_type = int
|
||||
maxsize = sys.maxsize
|
||||
next = next
|
||||
numeric_types = (int, float)
|
||||
unichr = chr
|
||||
unicode = str
|
||||
bytes = bytes
|
||||
|
|
@ -65,18 +60,6 @@ else:
|
|||
)
|
||||
filterfalse = itertools.filterfalse
|
||||
|
||||
def execfile(fn, globs=None, locs=None):
|
||||
if globs is None:
|
||||
globs = globals()
|
||||
if locs is None:
|
||||
locs = globs
|
||||
f = open(fn, 'rb')
|
||||
try:
|
||||
source = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
exec(compile(source, fn, 'exec'), globs, locs)
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
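The compat module now computes PY3/PY2 once at import time and branches on those booleans instead of repeating sys.version_info checks everywhere. The same pattern in miniature; a sketch, not the full shim:

    import sys

    PY3 = sys.version_info >= (3,)
    PY2 = not PY3

    if PY2:
        from StringIO import StringIO      # Python 2 stdlib location
        text_types = (str, unicode)        # 'unicode' only exists on Python 2
    else:
        from io import StringIO
        text_types = (str,)

    buf = StringIO()
    buf.write('running under Python %d\n' % sys.version_info[0])
    print(buf.getvalue(), text_types)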
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
from __future__ import generators
|
||||
import sys, imp, marshal
|
||||
import sys
|
||||
import imp
|
||||
import marshal
|
||||
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
|
||||
from distutils.version import StrictVersion, LooseVersion
|
||||
from distutils.version import StrictVersion
|
||||
from setuptools import compat
|
||||
|
||||
__all__ = [
|
||||
'Require', 'find_module', 'get_module_constant', 'extract_constant'
|
||||
|
|
@ -10,9 +12,8 @@ __all__ = [
|
|||
class Require:
|
||||
"""A prerequisite to building or installing a distribution"""
|
||||
|
||||
def __init__(self,name,requested_version,module,homepage='',
|
||||
attribute=None,format=None
|
||||
):
|
||||
def __init__(self, name, requested_version, module, homepage='',
|
||||
attribute=None, format=None):
|
||||
|
||||
if format is None and requested_version is not None:
|
||||
format = StrictVersion
|
||||
|
|
@ -25,20 +26,17 @@ class Require:
|
|||
self.__dict__.update(locals())
|
||||
del self.self
|
||||
|
||||
|
||||
def full_name(self):
|
||||
"""Return full package/distribution name, w/version"""
|
||||
if self.requested_version is not None:
|
||||
return '%s-%s' % (self.name,self.requested_version)
|
||||
return self.name
|
||||
|
||||
|
||||
def version_ok(self,version):
|
||||
def version_ok(self, version):
|
||||
"""Is 'version' sufficiently up-to-date?"""
|
||||
return self.attribute is None or self.format is None or \
|
||||
str(version) != "unknown" and version >= self.requested_version
|
||||
|
||||
|
||||
def get_version(self, paths=None, default="unknown"):
|
||||
|
||||
"""Get version number of installed module, 'None', or 'default'
|
||||
|
|
@ -59,20 +57,18 @@ class Require:
|
|||
except ImportError:
|
||||
return None
|
||||
|
||||
v = get_module_constant(self.module,self.attribute,default,paths)
|
||||
v = get_module_constant(self.module, self.attribute, default, paths)
|
||||
|
||||
if v is not None and v is not default and self.format is not None:
|
||||
return self.format(v)
|
||||
|
||||
return v
|
||||
|
||||
|
||||
def is_present(self,paths=None):
|
||||
def is_present(self, paths=None):
|
||||
"""Return true if dependency is present on 'paths'"""
|
||||
return self.get_version(paths) is not None
|
||||
|
||||
|
||||
def is_current(self,paths=None):
|
||||
def is_current(self, paths=None):
|
||||
"""Return true if dependency is present and up-to-date on 'paths'"""
|
||||
version = self.get_version(paths)
|
||||
if version is None:
|
||||
|
|
@ -103,7 +99,7 @@ def _iter_code(code):
|
|||
ptr += 3
|
||||
|
||||
if op==EXTENDED_ARG:
|
||||
extended_arg = arg * long_type(65536)
|
||||
extended_arg = arg * compat.long_type(65536)
|
||||
continue
|
||||
|
||||
else:
|
||||
|
|
@ -113,14 +109,6 @@ def _iter_code(code):
|
|||
yield op,arg
|
||||
def find_module(module, paths=None):
|
||||
"""Just like 'imp.find_module()', but with package support"""
|
||||
|
||||
|
|
@ -140,28 +128,6 @@ def find_module(module, paths=None):
|
|||
return info
|
||||
def get_module_constant(module, symbol, default=-1, paths=None):
|
||||
|
||||
"""Find 'module' by searching 'paths', and extract 'symbol'
|
||||
|
|
@ -171,7 +137,7 @@ def get_module_constant(module, symbol, default=-1, paths=None):
|
|||
constant. Otherwise, return 'default'."""
|
||||
|
||||
try:
|
||||
f, path, (suffix,mode,kind) = find_module(module,paths)
|
||||
f, path, (suffix, mode, kind) = find_module(module, paths)
|
||||
except ImportError:
|
||||
# Module doesn't exist
|
||||
return None
|
||||
|
|
@ -187,23 +153,17 @@ def get_module_constant(module, symbol, default=-1, paths=None):
|
|||
else:
|
||||
# Not something we can parse; we'll have to import it. :(
|
||||
if module not in sys.modules:
|
||||
imp.load_module(module,f,path,(suffix,mode,kind))
|
||||
return getattr(sys.modules[module],symbol,None)
|
||||
imp.load_module(module, f, path, (suffix, mode, kind))
|
||||
return getattr(sys.modules[module], symbol, None)
|
||||
|
||||
finally:
|
||||
if f:
|
||||
f.close()
|
||||
|
||||
return extract_constant(code,symbol,default)
|
||||
return extract_constant(code, symbol, default)
|
||||
def extract_constant(code,symbol,default=-1):
|
||||
def extract_constant(code, symbol, default=-1):
|
||||
"""Extract the constant value of 'symbol' from 'code'
|
||||
|
||||
If the name 'symbol' is bound to a constant value by the Python code
|
||||
|
|
@ -236,11 +196,20 @@ def extract_constant(code,symbol,default=-1):
|
|||
return const
|
||||
else:
|
||||
const = default
|
||||
|
||||
if sys.platform.startswith('java') or sys.platform == 'cli':
|
||||
# XXX it'd be better to test assertions about bytecode instead...
|
||||
del extract_constant, get_module_constant
|
||||
__all__.remove('extract_constant')
|
||||
__all__.remove('get_module_constant')
|
||||
|
||||
|
||||
def _update_globals():
|
||||
"""
|
||||
Patch the globals to remove the objects not available on some platforms.
|
||||
|
||||
XXX it'd be better to test assertions about bytecode instead.
|
||||
"""
|
||||
|
||||
if not sys.platform.startswith('java') and sys.platform != 'cli':
|
||||
return
|
||||
incompatible = 'extract_constant', 'get_module_constant'
|
||||
for name in incompatible:
|
||||
del globals()[name]
|
||||
__all__.remove(name)
|
||||
|
||||
_update_globals()
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import re
|
|||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import numbers
|
||||
import distutils.log
|
||||
import distutils.core
|
||||
import distutils.cmd
|
||||
|
|
@ -13,7 +14,7 @@ from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
|
|||
DistutilsSetupError)
|
||||
|
||||
from setuptools.depends import Require
|
||||
from setuptools.compat import numeric_types, basestring
|
||||
from setuptools.compat import basestring, PY2
|
||||
import pkg_resources
|
||||
|
||||
def _get_unpatched(cls):
|
||||
|
|
@ -34,11 +35,12 @@ _Distribution = _get_unpatched(_Distribution)
|
|||
|
||||
def _patch_distribution_metadata_write_pkg_info():
|
||||
"""
|
||||
Workaround issue #197 - Python 3.1 uses an environment-local encoding to
|
||||
save the pkg_info. Monkey-patch its write_pkg_info method to correct
|
||||
this undesirable behavior.
|
||||
Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
|
||||
encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
|
||||
correct this undesirable behavior.
|
||||
"""
|
||||
if sys.version_info[:2] != (3,1):
|
||||
environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
|
||||
if not environment_local:
|
||||
return
|
||||
|
||||
# from Python 3.4
|
||||
|
|
@ -257,12 +259,12 @@ class Distribution(_Distribution):
|
|||
self.dependency_links = attrs.pop('dependency_links', [])
|
||||
assert_string_list(self,'dependency_links',self.dependency_links)
|
||||
if attrs and 'setup_requires' in attrs:
|
||||
self.fetch_build_eggs(attrs.pop('setup_requires'))
|
||||
self.fetch_build_eggs(attrs['setup_requires'])
|
||||
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
|
||||
if not hasattr(self,ep.name):
|
||||
setattr(self,ep.name,None)
|
||||
_Distribution.__init__(self,attrs)
|
||||
if isinstance(self.metadata.version, numeric_types):
|
||||
if isinstance(self.metadata.version, numbers.Number):
|
||||
# Some people apparently take "version number" too literally :)
|
||||
self.metadata.version = str(self.metadata.version)
|
||||
|
||||
|
|
@ -279,12 +281,13 @@ class Distribution(_Distribution):
|
|||
|
||||
def fetch_build_eggs(self, requires):
|
||||
"""Resolve pre-setup requirements"""
|
||||
from pkg_resources import working_set, parse_requirements
|
||||
for dist in working_set.resolve(
|
||||
parse_requirements(requires), installer=self.fetch_build_egg,
|
||||
replace_conflicting=True
|
||||
):
|
||||
working_set.add(dist, replace=True)
|
||||
resolved_dists = pkg_resources.working_set.resolve(
|
||||
pkg_resources.parse_requirements(requires),
|
||||
installer=self.fetch_build_egg,
|
||||
replace_conflicting=True,
|
||||
)
|
||||
for dist in resolved_dists:
|
||||
pkg_resources.working_set.add(dist, replace=True)
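fetch_build_eggs now references pkg_resources at module level and separates resolving from activating. A hedged sketch of resolving a requirement against the current working set without an installer callback, so nothing gets downloaded; the requirement string is just an example:

    import pkg_resources

    # With no installer this only reports what is already installed.
    reqs = pkg_resources.parse_requirements(['setuptools'])
    try:
        dists = pkg_resources.working_set.resolve(reqs)
        print([str(dist) for dist in dists])
    except pkg_resources.ResolutionError as exc:
        print('could not resolve:', exc)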
|
||||
|
||||
def finalize_options(self):
|
||||
_Distribution.finalize_options(self)
|
||||
|
|
@ -629,7 +632,7 @@ class Distribution(_Distribution):
|
|||
"""
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3,) or self.help_commands:
|
||||
if PY2 or self.help_commands:
|
||||
return _Distribution.handle_display_options(self, option_order)
|
||||
|
||||
# Stdout may be StringIO (e.g. in tests)
|
||||
|
|
|
|||
|
|
@ -632,16 +632,15 @@ class PackageIndex(Environment):
|
|||
shutil.copy2(filename, dst)
|
||||
filename=dst
|
||||
|
||||
file = open(os.path.join(tmpdir, 'setup.py'), 'w')
|
||||
file.write(
|
||||
"from setuptools import setup\n"
|
||||
"setup(name=%r, version=%r, py_modules=[%r])\n"
|
||||
% (
|
||||
dists[0].project_name, dists[0].version,
|
||||
os.path.splitext(basename)[0]
|
||||
with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
|
||||
file.write(
|
||||
"from setuptools import setup\n"
|
||||
"setup(name=%r, version=%r, py_modules=[%r])\n"
|
||||
% (
|
||||
dists[0].project_name, dists[0].version,
|
||||
os.path.splitext(basename)[0]
|
||||
)
|
||||
)
|
||||
)
|
||||
file.close()
|
||||
return filename
|
||||
|
||||
elif match:
|
||||
|
|
@ -660,7 +659,7 @@ class PackageIndex(Environment):
|
|||
def _download_to(self, url, filename):
|
||||
self.info("Downloading %s", url)
|
||||
# Download the file
|
||||
fp, tfp, info = None, None, None
|
||||
fp, info = None, None
|
||||
try:
|
||||
checker = HashChecker.from_url(url)
|
||||
fp = self.open_url(strip_fragment(url))
|
||||
|
|
@ -677,21 +676,20 @@ class PackageIndex(Environment):
|
|||
sizes = get_all_headers(headers, 'Content-Length')
|
||||
size = max(map(int, sizes))
|
||||
self.reporthook(url, filename, blocknum, bs, size)
|
||||
tfp = open(filename,'wb')
|
||||
while True:
|
||||
block = fp.read(bs)
|
||||
if block:
|
||||
checker.feed(block)
|
||||
tfp.write(block)
|
||||
blocknum += 1
|
||||
self.reporthook(url, filename, blocknum, bs, size)
|
||||
else:
|
||||
break
|
||||
self.check_hash(checker, filename, tfp)
|
||||
with open(filename,'wb') as tfp:
|
||||
while True:
|
||||
block = fp.read(bs)
|
||||
if block:
|
||||
checker.feed(block)
|
||||
tfp.write(block)
|
||||
blocknum += 1
|
||||
self.reporthook(url, filename, blocknum, bs, size)
|
||||
else:
|
||||
break
|
||||
self.check_hash(checker, filename, tfp)
|
||||
return headers
|
||||
finally:
|
||||
if fp: fp.close()
|
||||
if tfp: tfp.close()
|
||||
|
||||
def reporthook(self, url, filename, blocknum, blksize, size):
|
||||
pass # no-op
|
||||
|
|
@ -1040,9 +1038,8 @@ def local_open(url):
|
|||
files = []
|
||||
for f in os.listdir(filename):
|
||||
if f=='index.html':
|
||||
fp = open(os.path.join(filename,f),'r')
|
||||
body = fp.read()
|
||||
fp.close()
|
||||
with open(os.path.join(filename,f),'r') as fp:
|
||||
body = fp.read()
|
||||
break
|
||||
elif os.path.isdir(os.path.join(filename,f)):
|
||||
f+='/'
|
||||
|
|
|
|||
|
|
@ -20,12 +20,28 @@ _open = open
|
|||
from distutils.errors import DistutilsError
|
||||
from pkg_resources import working_set
|
||||
|
||||
from setuptools.compat import builtins, execfile
|
||||
from setuptools.compat import builtins
|
||||
|
||||
__all__ = [
|
||||
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
|
||||
]
|
||||
|
||||
def _execfile(filename, globals, locals=None):
|
||||
"""
|
||||
Python 3 implementation of execfile.
|
||||
"""
|
||||
mode = 'rb'
|
||||
# Python 2.6 compile requires LF for newlines, so use deprecated
|
||||
# Universal newlines support.
|
||||
if sys.version_info < (2, 7):
|
||||
mode += 'U'
|
||||
with open(filename, mode) as stream:
|
||||
script = stream.read()
|
||||
if locals is None:
|
||||
locals = globals
|
||||
code = compile(script, filename, 'exec')
|
||||
exec(code, globals, locals)
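run_setup now compiles and execs the setup script through the new _execfile instead of a compat execfile. The underlying mechanics, shown with a throwaway script written to a temp file; everything below is illustrative, not setuptools code:

    import os
    import tempfile

    # Write a tiny stand-in "setup script", then exec it the way _execfile does.
    fd, path = tempfile.mkstemp(suffix='.py')
    with os.fdopen(fd, 'w') as f:
        f.write("print('hello from %s' % __file__)\n")

    with open(path, 'rb') as stream:
        script = stream.read()
    namespace = {'__file__': path, '__name__': '__main__'}
    exec(compile(script, path, 'exec'), namespace)
    os.unlink(path)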
|
||||
|
||||
def run_setup(setup_script, args):
|
||||
"""Run a distutils setup script, sandboxed in its directory"""
|
||||
old_dir = os.getcwd()
|
||||
|
|
@ -46,12 +62,10 @@ def run_setup(setup_script, args):
|
|||
# reset to include setup dir, w/clean callback list
|
||||
working_set.__init__()
|
||||
working_set.callbacks.append(lambda dist:dist.activate())
|
||||
DirectorySandbox(setup_dir).run(
|
||||
lambda: execfile(
|
||||
"setup.py",
|
||||
{'__file__':setup_script, '__name__':'__main__'}
|
||||
)
|
||||
)
|
||||
def runner():
|
||||
ns = dict(__file__=setup_script, __name__='__main__')
|
||||
_execfile(setup_script, ns)
|
||||
DirectorySandbox(setup_dir).run(runner)
|
||||
except SystemExit:
|
||||
v = sys.exc_info()[1]
|
||||
if v.args and v.args[0]:
|
||||
|
|
@ -268,7 +282,7 @@ class DirectorySandbox(AbstractSandbox):
|
|||
self._violation(operation, src, dst, *args, **kw)
|
||||
return (src,dst)
|
||||
|
||||
def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777
|
||||
def open(self, file, flags, mode=0o777, *args, **kw):
|
||||
"""Called for low-level os.open()"""
|
||||
if flags & WRITE_FLAGS and not self._ok(file):
|
||||
self._violation("os.open", file, flags, mode, *args, **kw)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
|
||||
__requires__ = %(spec)r
|
||||
__import__('pkg_resources').require(%(spec)r)
|
||||
__file__ = %(dev_path)r
|
||||
exec(compile(open(__file__).read(), __file__, 'exec'))
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
|
||||
__requires__ = """%(spec)r"""
|
||||
import sys
|
||||
from pkg_resources import require
|
||||
require("""%(spec)r""")
|
||||
del require
|
||||
__file__ = """%(dev_path)r"""
|
||||
if sys.version_info < (3, 0):
|
||||
execfile(__file__)
|
||||
else:
|
||||
exec(compile(open(__file__).read(), __file__, 'exec'))
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
|
||||
__requires__ = """%(spec)r"""
|
||||
import pkg_resources
|
||||
pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
|
||||
__requires__ = %(spec)r
|
||||
__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
|
||||
|
|
@ -178,12 +178,19 @@ class VerifyingHTTPSConn(HTTPSConnection):
|
|||
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
# http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
|
||||
# change self.host to mean the proxy server host when tunneling is
|
||||
# being used. Adapt, since we are interested in the destination
|
||||
# host for the match_hostname() comparison.
|
||||
actual_host = self._tunnel_host
|
||||
else:
|
||||
actual_host = self.host
|
||||
|
||||
self.sock = ssl.wrap_socket(
|
||||
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
|
||||
)
|
||||
try:
|
||||
match_hostname(self.sock.getpeercert(), self.host)
|
||||
match_hostname(self.sock.getpeercert(), actual_host)
|
||||
except CertificateError:
|
||||
self.sock.shutdown(socket.SHUT_RDWR)
|
||||
self.sock.close()
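The ssl change matters when PyPI is reached through a CONNECT proxy: recent Pythons repoint self.host at the proxy during tunneling, so certificate verification has to compare against the tunnelled destination instead. A condensed sketch of that host selection; the connection object below is a stand-in, not the real HTTPSConnection:

    def host_to_verify(conn):
        # During proxy tunneling, newer Pythons set conn.host to the proxy;
        # the certificate still has to match the tunnelled destination.
        tunnel_host = getattr(conn, '_tunnel_host', None)
        return tunnel_host or conn.host

    class _FakeConn(object):
        host = 'proxy.internal.example'
        _tunnel_host = 'pypi.python.org'

    print(host_to_verify(_FakeConn()))   # -> pypi.python.org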
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import locale
|
|||
import codecs
|
||||
import unicodedata
|
||||
import warnings
|
||||
from setuptools.compat import unicode
|
||||
from setuptools.compat import unicode, PY2
|
||||
from setuptools.py31compat import TemporaryDirectory
|
||||
from xml.sax.saxutils import unescape
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ def _get_target_property(target):
|
|||
|
||||
|
||||
def _get_xml_data(decoded_str):
|
||||
if sys.version_info < (3, 0):
|
||||
if PY2:
|
||||
#old versions want an encoded string
|
||||
data = decoded_str.encode('utf-8')
|
||||
else:
|
||||
|
|
@ -180,12 +180,12 @@ def parse_external_prop(lines):
|
|||
if not line:
|
||||
continue
|
||||
|
||||
if sys.version_info < (3, 0):
|
||||
if PY2:
|
||||
#shlex handles NULLs just fine and shlex in 2.7 tries to encode
|
||||
#as ascii automatiically
|
||||
line = line.encode('utf-8')
|
||||
line = shlex.split(line)
|
||||
if sys.version_info < (3, 0):
|
||||
if PY2:
|
||||
line = [x.decode('utf-8') for x in line]
|
||||
|
||||
#EXT_FOLDERNAME is either the first or last depending on where
|
||||
|
|
@@ -232,13 +232,13 @@ class SvnInfo(object):
     @staticmethod
     def get_svn_version():
         # Temp config directory should be enough to check for repository
-        # This is needed because .svn always creates .subversion and
+        # This is needed because .svn always creates .subversion and
         # some operating systems do not handle dot directory correctly.
         # Real queries in real svn repos with be concerned with it creation
         with TemporaryDirectory() as tempdir:
-            code, data = _run_command(['svn',
+            code, data = _run_command(['svn',
                                        '--config-dir', tempdir,
-                                       '--version',
+                                       '--version',
                                        '--quiet'])

         if code == 0 and data:
@@ -258,11 +258,11 @@ class SvnInfo(object):
         normdir = os.path.normpath(dirname)

         # Temp config directory should be enough to check for repository
-        # This is needed because .svn always creates .subversion and
+        # This is needed because .svn always creates .subversion and
         # some operating systems do not handle dot directory correctly.
         # Real queries in real svn repos with be concerned with it creation
         with TemporaryDirectory() as tempdir:
-            code, data = _run_command(['svn',
+            code, data = _run_command(['svn',
                                        '--config-dir', tempdir,
                                        'info', normdir])

@@ -2,7 +2,7 @@
 import sys
 import os
 import unittest
-from setuptools.tests import doctest
+import doctest
 import distutils.core
 import distutils.cmd
 from distutils.errors import DistutilsOptionError, DistutilsPlatformError
@@ -18,7 +18,6 @@ from setuptools import Feature
 from setuptools.depends import Require

 def additional_tests():
-    import doctest, unittest
     suite = unittest.TestSuite((
         doctest.DocFileSuite(
             os.path.join('tests', 'api_tests.txt'),
File diff suppressed because it is too large
@@ -9,8 +9,6 @@ import unittest

 from distutils.errors import DistutilsError
 from setuptools.command.develop import develop
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.compat import StringIO
 from setuptools.dist import Distribution

 SETUP_PY = """\
@@ -114,11 +112,11 @@ class TestDevelopTest(unittest.TestCase):
         os.chdir(self.dir)
         try:
             try:
-                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
+                Distribution({'setup_requires': ['I_DONT_EXIST']})
             except DistutilsError:
                 e = sys.exc_info()[1]
                 error = str(e)
-                if error == wanted:
+                if error == wanted:
                     pass
         finally:
             os.chdir(old_dir)
@@ -12,7 +12,7 @@ import tarfile
 import logging
 import distutils.core

-from setuptools.compat import StringIO, BytesIO, next, urlparse
+from setuptools.compat import StringIO, BytesIO, urlparse
 from setuptools.sandbox import run_setup, SandboxViolation
 from setuptools.command.easy_install import (
     easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
@@ -226,9 +226,6 @@ class TestUserInstallTest(unittest.TestCase):
         else:
             del os.environ['PYTHONPATH']

-    @skipIf(sys.version_info < (3, 4),
-        "Test fails on Python 3.3 and earlier due to bug in inspect but only "
-        "when run under setup.py test")
     def test_setup_requires(self):
         """Regression test for Distribute issue #318

@@ -246,6 +243,10 @@ class TestUserInstallTest(unittest.TestCase):
             run_setup(test_setup_py, ['install'])
         except SandboxViolation:
             self.fail('Installation caused SandboxViolation')
+        except IndexError:
+            # Test fails in some cases due to bugs in Python
+            # See https://bitbucket.org/pypa/setuptools/issue/201
+            pass


 class TestSetupRequires(unittest.TestCase):
@@ -138,6 +138,43 @@ class TestSvnDummy(environment.ZippedEnvironment):

         return data

+    @skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
+    def test_svn_tags(self):
+        code, data = environment.run_setup_py(["egg_info",
+                                               "--tag-svn-revision"],
+                                              pypath=self.old_cwd,
+                                              data_stream=1)
+        if code:
+            raise AssertionError(data)
+
+        pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
+        infile = open(pkginfo, 'r')
+        try:
+            read_contents = infile.readlines()
+        finally:
+            infile.close()
+            del infile
+
+        self.assertTrue("Version: 0.1.1-r1\n" in read_contents)
+
+    @skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
+    def test_no_tags(self):
+        code, data = environment.run_setup_py(["egg_info"],
+                                              pypath=self.old_cwd,
+                                              data_stream=1)
+        if code:
+            raise AssertionError(data)
+
+        pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
+        infile = open(pkginfo, 'r')
+        try:
+            read_contents = infile.readlines()
+        finally:
+            infile.close()
+            del infile
+
+        self.assertTrue("Version: 0.1.1\n" in read_contents)
+

 class TestSvnDummyLegacy(environment.ZippedEnvironment):

@@ -12,12 +12,26 @@ from setuptools.tests.py26compat import skipIf

 find_420_packages = setuptools.PEP420PackageFinder.find

+# modeled after CPython's test.support.can_symlink
+def can_symlink():
+    TESTFN = tempfile.mktemp()
+    symlink_path = TESTFN + "can_symlink"
+    try:
+        os.symlink(TESTFN, symlink_path)
+        can = True
+    except (OSError, NotImplementedError, AttributeError):
+        can = False
+    else:
+        os.remove(symlink_path)
+    globals().update(can_symlink=lambda: can)
+    return can
+
 def has_symlink():
     bad_symlink = (
         # Windows symlink directory detection is broken on Python 3.2
         platform.system() == 'Windows' and sys.version_info[:2] == (3,2)
     )
-    return hasattr(os, 'symlink') and not bad_symlink
+    return can_symlink() and not bad_symlink

 class TestFindPackages(unittest.TestCase):

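The can_symlink() helper added above runs the symlink probe once and then rebinds its own module-level name to a cached lambda, so later calls are free. A tiny illustration of that self-replacing memoization pattern (expensive_check is a made-up name for the sketch):

    def expensive_check():
        result = 42  # stand-in for a costly probe
        # Rebind the module-level name so subsequent calls skip the probe.
        globals().update(expensive_check=lambda: result)
        return result

    print(expensive_check())  # runs the probe
    print(expensive_check())  # answered by the cached lambda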
@@ -0,0 +1,83 @@
+"""Run some integration tests.
+
+Try to install a few packages.
+"""
+
+import glob
+import os
+import sys
+
+import pytest
+
+from setuptools.command.easy_install import easy_install
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+
+@pytest.fixture
+def install_context(request, tmpdir, monkeypatch):
+    """Fixture to set up temporary installation directory.
+    """
+    # Save old values so we can restore them.
+    new_cwd = tmpdir.mkdir('cwd')
+    user_base = tmpdir.mkdir('user_base')
+    user_site = tmpdir.mkdir('user_site')
+    install_dir = tmpdir.mkdir('install_dir')
+
+    def fin():
+        # undo the monkeypatch, particularly needed under
+        # windows because of kept handle on cwd
+        monkeypatch.undo()
+        new_cwd.remove()
+        user_base.remove()
+        user_site.remove()
+        install_dir.remove()
+    request.addfinalizer(fin)
+
+    # Change the environment and site settings to control where the
+    # files are installed and ensure we do not overwrite anything.
+    monkeypatch.chdir(new_cwd)
+    monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
+    monkeypatch.setattr('site.USER_BASE', user_base.strpath)
+    monkeypatch.setattr('site.USER_SITE', user_site.strpath)
+    monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
+    monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
+
+    # Set up the command for performing the installation.
+    dist = Distribution()
+    cmd = easy_install(dist)
+    cmd.install_dir = install_dir.strpath
+    return cmd
+
+
+def _install_one(requirement, cmd, pkgname, modulename):
+    cmd.args = [requirement]
+    cmd.ensure_finalized()
+    cmd.run()
+    target = cmd.install_dir
+    dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
+    assert dest_path
+    assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
+
+
+def test_stevedore(install_context):
+    _install_one('stevedore', install_context,
+                 'stevedore', 'extension.py')
+
+
+@pytest.mark.xfail
+def test_virtualenvwrapper(install_context):
+    _install_one('virtualenvwrapper', install_context,
+                 'virtualenvwrapper', 'hook_loader.py')
+
+
+@pytest.mark.xfail
+def test_pbr(install_context):
+    _install_one('pbr', install_context,
+                 'pbr', 'core.py')
+
+
+@pytest.mark.xfail
+def test_python_novaclient(install_context):
+    _install_one('python-novaclient', install_context,
+                 'novaclient', 'base.py')
@@ -15,12 +15,8 @@ from pkg_resources import (parse_requirements, VersionConflict, parse_version,

 from setuptools.command.easy_install import (get_script_header, is_sh,
     nt_quote_arg)
-from setuptools.compat import StringIO, iteritems
-
-try:
-    frozenset
-except NameError:
-    from sets import ImmutableSet as frozenset
+from setuptools.compat import StringIO, iteritems, PY3
+from .py26compat import skipIf

 def safe_repr(obj, short=False):
     """ copied from Python2.7"""
@@ -522,8 +518,7 @@ class ScriptHeaderTests(TestCase):

     def test_get_script_header_jython_workaround(self):
         # This test doesn't work with Python 3 in some locales
-        if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
-                in (None, "C", "POSIX")):
+        if PY3 and os.environ.get("LC_CTYPE") in (None, "C", "POSIX"):
             return

         class java:
@@ -576,13 +571,8 @@ class NamespaceTests(TestCase):
         pkg_resources._namespace_packages = self._ns_pkgs.copy()
         sys.path = self._prev_sys_path[:]

-    def _assertIn(self, member, container):
-        """ assertIn and assertTrue does not exist in Python2.3"""
-        if member not in container:
-            standardMsg = '%s not found in %s' % (safe_repr(member),
-                                                  safe_repr(container))
-            self.fail(self._formatMessage(msg, standardMsg))
-
+    msg = "Test fails when /tmp is a symlink. See #231"
+    @skipIf(os.path.islink(tempfile.gettempdir()), msg)
     def test_two_levels_deep(self):
         """
         Test nested namespace packages
@@ -606,15 +596,17 @@ class NamespaceTests(TestCase):
         pkg2_init.write(ns_str)
         pkg2_init.close()
         import pkg1
-        self._assertIn("pkg1", pkg_resources._namespace_packages.keys())
+        assert "pkg1" in pkg_resources._namespace_packages
         try:
             import pkg1.pkg2
         except ImportError:
             self.fail("Setuptools tried to import the parent namespace package")
         # check the _namespace_packages dict
-        self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys())
-        self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"])
+        assert "pkg1.pkg2" in pkg_resources._namespace_packages
+        assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
         # check the __path__ attribute contains both paths
-        self.assertEqual(pkg1.pkg2.__path__, [
+        expected = [
             os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
-            os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")])
+            os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"),
+        ]
+        assert pkg1.pkg2.__path__ == expected
@@ -19,7 +19,7 @@ def has_win32com():
     if not sys.platform.startswith('win32'):
         return False
     try:
-        mod = __import__('win32com')
+        __import__('win32com')
     except ImportError:
         return False
     return True
@@ -33,8 +33,6 @@ class TestSandbox(unittest.TestCase):
         shutil.rmtree(self.dir)

     def test_devnull(self):
-        if sys.version < '2.4':
-            return
         sandbox = DirectorySandbox(self.dir)
         sandbox.run(self._file_writer(os.devnull))

@@ -72,8 +70,14 @@ class TestSandbox(unittest.TestCase):
         target = pkg_resources.resource_filename(__name__,
             'script-with-bom.py')
         namespace = types.ModuleType('namespace')
-        setuptools.sandbox.execfile(target, vars(namespace))
+        setuptools.sandbox._execfile(target, vars(namespace))
         assert namespace.result == 'passed'

+    def test_setup_py_with_CRLF(self):
+        setup_py = os.path.join(self.dir, 'setup.py')
+        with open(setup_py, 'wb') as stream:
+            stream.write(b'"degenerate script"\r\n')
+        setuptools.sandbox._execfile(setup_py, globals())
+
 if __name__ == '__main__':
     unittest.main()
@@ -9,11 +9,11 @@ import tempfile
 import unittest
 import unicodedata
 import re
+import contextlib
 from setuptools.tests import environment, test_svn
-from setuptools.tests.py26compat import skipIf
-
-from setuptools.compat import StringIO, unicode
+from setuptools.tests.py26compat import skipIf
+from setuptools.compat import StringIO, unicode, PY3, PY2
 from setuptools.command.sdist import sdist, walk_revctrl
 from setuptools.command.egg_info import manifest_maker
 from setuptools.dist import Distribution
@@ -34,32 +34,33 @@ setup(**%r)
 """ % SETUP_ATTRS


-if sys.version_info >= (3,):
+if PY3:
     LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
 else:
     LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'


-# Cannot use context manager because of Python 2.4
+@contextlib.contextmanager
 def quiet():
-    global old_stdout, old_stderr
     old_stdout, old_stderr = sys.stdout, sys.stderr
     sys.stdout, sys.stderr = StringIO(), StringIO()
-
-def unquiet():
-    sys.stdout, sys.stderr = old_stdout, old_stderr
+    try:
+        yield
+    finally:
+        sys.stdout, sys.stderr = old_stdout, old_stderr


 # Fake byte literals for Python <= 2.5
 def b(s, encoding='utf-8'):
-    if sys.version_info >= (3,):
+    if PY3:
         return s.encode(encoding)
     return s


 # Convert to POSIX path
 def posix(path):
-    if sys.version_info >= (3,) and not isinstance(path, str):
+    if PY3 and not isinstance(path, str):
         return path.replace(os.sep.encode('ascii'), b('/'))
     else:
         return path.replace(os.sep, '/')
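The quiet()/unquiet() pair above is collapsed into a single context manager, which is what lets the later hunks replace every quiet()/try/finally/unquiet() block with a plain "with quiet():". A self-contained sketch of the pattern (io.StringIO stands in for setuptools.compat.StringIO):

    import contextlib
    import sys
    from io import StringIO

    @contextlib.contextmanager
    def quiet():
        # Swap the real streams for buffers, and always restore them.
        old_stdout, old_stderr = sys.stdout, sys.stderr
        sys.stdout, sys.stderr = StringIO(), StringIO()
        try:
            yield
        finally:
            sys.stdout, sys.stderr = old_stdout, old_stderr

    with quiet():
        print("this output is swallowed")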
@@ -74,7 +75,7 @@ def decompose(path):
         path = unicodedata.normalize('NFD', path)
         path = path.encode('utf-8')
     except UnicodeError:
-        pass # Not UTF-8
+        pass  # Not UTF-8
     return path


@@ -112,12 +113,8 @@ class TestSdistTest(unittest.TestCase):
         cmd = sdist(dist)
         cmd.ensure_finalized()

-        # squelch output
-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

         manifest = cmd.filelist.files
         self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
@@ -135,14 +132,14 @@ class TestSdistTest(unittest.TestCase):
         # UTF-8 filename
         filename = os.path.join('sdist_test', 'smörbröd.py')

         # Must create the file or it will get stripped.
         open(filename, 'w').close()

         # Add UTF-8 filename and write manifest
-        quiet()
-        try:
+        with quiet():
             mm.run()
-            mm.filelist.files.append(filename)
+            mm.filelist.append(filename)
             mm.write_manifest()
-        finally:
-            unquiet()

         manifest = open(mm.manifest, 'rbU')
         contents = manifest.read()
@@ -156,13 +153,14 @@ class TestSdistTest(unittest.TestCase):
             self.fail(e)

         # The manifest should contain the UTF-8 filename
-        if sys.version_info >= (3,):
-            self.assertTrue(posix(filename) in u_contents)
-        else:
-            self.assertTrue(posix(filename) in contents)
+        if PY2:
+            fs_enc = sys.getfilesystemencoding()
+            filename = filename.decode(fs_enc)
+
+        self.assertTrue(posix(filename) in u_contents)

     # Python 3 only
-    if sys.version_info >= (3,):
+    if PY3:

         def test_write_manifest_allows_utf8_filenames(self):
             # Test for #303.
@@ -175,16 +173,16 @@ class TestSdistTest(unittest.TestCase):
         # UTF-8 filename
         filename = os.path.join(b('sdist_test'), b('smörbröd.py'))

         # Must touch the file or risk removal
         open(filename, "w").close()

         # Add filename and write manifest
-        quiet()
-        try:
+        with quiet():
             mm.run()
             u_filename = filename.decode('utf-8')
             mm.filelist.files.append(u_filename)
             # Re-write manifest
             mm.write_manifest()
-        finally:
-            unquiet()

         manifest = open(mm.manifest, 'rbU')
         contents = manifest.read()
@@ -204,7 +202,12 @@ class TestSdistTest(unittest.TestCase):
         self.assertTrue(u_filename in mm.filelist.files)

     def test_write_manifest_skips_non_utf8_filenames(self):
-        # Test for #303.
+        """
+        Files that cannot be encoded to UTF-8 (specifically, those that
+        weren't originally successfully decoded and have surrogate
+        escapes) should be omitted from the manifest.
+        See https://bitbucket.org/tarek/distribute/issue/303 for history.
+        """
         dist = Distribution(SETUP_ATTRS)
         dist.script_name = 'setup.py'
         mm = manifest_maker(dist)
@@ -215,15 +218,12 @@ class TestSdistTest(unittest.TestCase):
         filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)

         # Add filename with surrogates and write manifest
-        quiet()
-        try:
+        with quiet():
             mm.run()
             u_filename = filename.decode('utf-8', 'surrogateescape')
-            mm.filelist.files.append(u_filename)
+            mm.filelist.append(u_filename)
             # Re-write manifest
             mm.write_manifest()
-        finally:
-            unquiet()

         manifest = open(mm.manifest, 'rbU')
         contents = manifest.read()
@@ -250,17 +250,14 @@ class TestSdistTest(unittest.TestCase):
         cmd.ensure_finalized()

         # Create manifest
-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

         # Add UTF-8 filename to manifest
         filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
         cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
         manifest = open(cmd.manifest, 'ab')
-        manifest.write(b('\n')+filename)
+        manifest.write(b('\n') + filename)
         manifest.close()

         # The file must exist to be included in the filelist
@@ -268,19 +265,16 @@ class TestSdistTest(unittest.TestCase):

         # Re-read manifest
         cmd.filelist.files = []
-        quiet()
-        try:
+        with quiet():
             cmd.read_manifest()
-        finally:
-            unquiet()

         # The filelist should contain the UTF-8 filename
-        if sys.version_info >= (3,):
+        if PY3:
             filename = filename.decode('utf-8')
         self.assertTrue(filename in cmd.filelist.files)

     # Python 3 only
-    if sys.version_info >= (3,):
+    if PY3:

         def test_read_manifest_skips_non_utf8_filenames(self):
             # Test for #303.
@@ -290,17 +284,14 @@ class TestSdistTest(unittest.TestCase):
         cmd.ensure_finalized()

         # Create manifest
-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

         # Add Latin-1 filename to manifest
         filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
         cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
         manifest = open(cmd.manifest, 'ab')
-        manifest.write(b('\n')+filename)
+        manifest.write(b('\n') + filename)
         manifest.close()

         # The file must exist to be included in the filelist
@@ -308,21 +299,18 @@ class TestSdistTest(unittest.TestCase):

         # Re-read manifest
         cmd.filelist.files = []
-        quiet()
-        try:
+        with quiet():
             try:
                 cmd.read_manifest()
             except UnicodeDecodeError:
                 e = sys.exc_info()[1]
                 self.fail(e)
-        finally:
-            unquiet()

         # The Latin-1 filename should have been skipped
         filename = filename.decode('latin-1')
         self.assertFalse(filename in cmd.filelist.files)

-    @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8',
+    @skipIf(PY3 and locale.getpreferredencoding() != 'UTF-8',
         'Unittest fails if locale is not utf-8 but the manifests is recorded correctly')
     def test_sdist_with_utf8_encoded_filename(self):
         # Test for #303.
@@ -335,16 +323,13 @@ class TestSdistTest(unittest.TestCase):
         filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
         open(filename, 'w').close()

-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

         if sys.platform == 'darwin':
             filename = decompose(filename)

-        if sys.version_info >= (3,):
+        if PY3:
             fs_enc = sys.getfilesystemencoding()

             if sys.platform == 'win32':
@@ -373,14 +358,11 @@ class TestSdistTest(unittest.TestCase):
         open(filename, 'w').close()
         self.assertTrue(os.path.isfile(filename))

-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

-        if sys.version_info >= (3,):
-            #not all windows systems have a default FS encoding of cp1252
+        if PY3:
+            # not all windows systems have a default FS encoding of cp1252
             if sys.platform == 'win32':
                 # Latin-1 is similar to Windows-1252 however
                 # on mbcs filesys it is not in latin-1 encoding
@@ -396,10 +378,17 @@ class TestSdistTest(unittest.TestCase):
                 filename = filename.decode('latin-1')
                 self.assertFalse(filename in cmd.filelist.files)
         else:
-            # No conversion takes place under Python 2 and the file
-            # is included. We shall keep it that way for BBB.
-            self.assertTrue(filename in cmd.filelist.files)
-
+            # Under Python 2 there seems to be no decoded string in the
+            # filelist. However, due to decode and encoding of the
+            # file name to get utf-8 Manifest the latin1 maybe excluded
+            try:
+                # fs_enc should match how one is expect the decoding to
+                # be proformed for the manifest output.
+                fs_enc = sys.getfilesystemencoding()
+                filename.decode(fs_enc)
+                self.assertTrue(filename in cmd.filelist.files)
+            except UnicodeDecodeError:
+                self.assertFalse(filename in cmd.filelist.files)

 class TestDummyOutput(environment.ZippedEnvironment):

@@ -484,7 +473,7 @@ class TestSvn(environment.ZippedEnvironment):
         elif self.base_version < (1, 3):
             raise ValueError('Insufficient SVN Version %s' % version)
         elif self.base_version >= (1, 9):
-            #trying the latest version
+            # trying the latest version
             self.base_version = (1, 8)

         self.dataname = "svn%i%i_example" % self.base_version
@@ -501,7 +490,7 @@ class TestSvn(environment.ZippedEnvironment):
         folder2 = 'third_party2'
         folder3 = 'third_party3'

-        #TODO is this right
+        # TODO is this right
         expected = set([
             os.path.join('a file'),
             os.path.join(folder2, 'Changes.txt'),
@@ -10,7 +10,7 @@ import tempfile
 import unittest

 from distutils.errors import DistutilsError
-from setuptools.compat import StringIO
+from setuptools.compat import StringIO, PY2
 from setuptools.command.test import test
 from setuptools.command import easy_install as easy_install_pkg
 from setuptools.dist import Distribution
@@ -34,7 +34,7 @@ except ImportError:
     __path__ = extend_path(__path__, __name__)
 """
 # Make sure this is Latin-1 binary, before writing:
-if sys.version_info < (3,):
+if PY2:
     NS_INIT = NS_INIT.decode('UTF-8')
 NS_INIT = NS_INIT.encode('Latin-1')

@@ -0,0 +1,41 @@
+import unicodedata
+import sys
+from setuptools.compat import unicode as decoded_string
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+    if isinstance(path, decoded_string):
+        return unicodedata.normalize('NFD', path)
+    try:
+        path = path.decode('utf-8')
+        path = unicodedata.normalize('NFD', path)
+        path = path.encode('utf-8')
+    except UnicodeError:
+        pass  # Not UTF-8
+    return path
+
+
+def filesys_decode(path):
+    """
+    Ensure that the given path is decoded,
+    NONE when no expected encoding works
+    """
+
+    fs_enc = sys.getfilesystemencoding()
+    if isinstance(path, decoded_string):
+        return path
+
+    for enc in (fs_enc, "utf-8"):
+        try:
+            return path.decode(enc)
+        except UnicodeDecodeError:
+            continue
+
+
+def try_encode(string, enc):
+    "turn unicode encoding into a functional routine"
+    try:
+        return string.encode(enc)
+    except UnicodeEncodeError:
+        return None
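The helpers in the new file above decode file system paths through a fallback chain and give up gracefully. A hedged usage sketch of that decode-or-None behaviour (a standalone rewrite, not an import of the vendored module):

    import sys

    def filesys_decode_sketch(path):
        # Same fallback chain as the helper above: filesystem encoding
        # first, then UTF-8; None when nothing works.
        if isinstance(path, str):
            return path
        for enc in (sys.getfilesystemencoding(), "utf-8"):
            try:
                return path.decode(enc)
            except UnicodeDecodeError:
                continue
        return None

    print(filesys_decode_sketch(b'sm\xc3\xb6rbr\xc3\xb6d.py'))  # smörbröd.py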
@@ -1 +1 @@
-__version__ = '3.5.1'
+__version__ = '5.5.1'