update to tornado 4.0 and requests 2.3.0

parent 060f459965
commit f187000dc9

239 changed files with 19071 additions and 20369 deletions
@@ -5,10 +5,11 @@ __all__ = [
'register', 'bdist_wininst', 'upload_docs',
]

from setuptools.command import install_scripts
from distutils.command.bdist import bdist
import sys

from distutils.command.bdist import bdist
from setuptools.command import install_scripts


if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
@@ -2,10 +2,12 @@ from distutils.errors import DistutilsOptionError

from setuptools.command.setopt import edit_config, option_base, config_file


def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if c in arg:
return repr(arg)
if arg.split() != [arg]:
return repr(arg)
return arg

@@ -18,7 +20,7 @@ class alias(option_base):
command_consumes_arguments = True

user_options = [
('remove', 'r', 'remove (unset) the alias'),
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options

boolean_options = option_base.boolean_options + ['remove']

@@ -46,7 +48,7 @@ class alias(option_base):
print("setup.py alias", format_alias(alias, aliases))
return

elif len(self.args)==1:
elif len(self.args) == 1:
alias, = self.args
if self.remove:
command = None

@@ -58,9 +60,9 @@ class alias(option_base):
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
command = ' '.join(map(shquote, self.args[1:]))

edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)


def format_alias(name, aliases):

@@ -73,4 +75,4 @@ def format_alias(name, aliases):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
return source + name + ' ' + command
@@ -3,29 +3,33 @@
Build .egg distributions"""

# This module should be kept compatible with Python 2.3
from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import marshal
import textwrap

from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.compat import basestring
from setuptools.extension import Library
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath

try:
# Python 2.7 or >=3.2
from sysconfig import get_path, get_python_version

def _get_purelib():
return get_path("purelib")
except ImportError:
from distutils.sysconfig import get_python_lib, get_python_version

def _get_purelib():
return get_python_lib(False)

from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.compat import basestring, next
from setuptools.extension import Library

def strip_module(filename):
if '.' in filename:

@@ -34,6 +38,7 @@ def strip_module(filename):
filename = filename[:-6]
return filename


def write_stub(resource, pyfile):
_stub_template = textwrap.dedent("""
def __bootstrap__():

@@ -49,23 +54,22 @@ def write_stub(resource, pyfile):


class bdist_egg(Command):

description = "create an \"egg\" distribution"

user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
"temporary directory for creating the distribution"),
('plat-name=', 'p', "platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
"skip rebuilding everything (for testing/debugging)"),
]

boolean_options = [

@@ -92,7 +96,7 @@ class bdist_egg(Command):
if self.plat_name is None:
self.plat_name = get_build_platform()

self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

if self.egg_output is None:

@@ -103,25 +107,25 @@ class bdist_egg(Command):
self.distribution.has_ext_modules() and self.plat_name
).egg_name()

self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir

site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
old, self.distribution.data_files = self.distribution.data_files,[]
old, self.distribution.data_files = self.distribution.data_files, []

for item in old:
if isinstance(item,tuple) and len(item)==2:
if isinstance(item, tuple) and len(item) == 2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
if normalized == site_packages or normalized.startswith(
site_packages + os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
item = realpath[len(site_packages) + 1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)

try:

@@ -133,11 +137,11 @@ class bdist_egg(Command):
def get_outputs(self):
return [self.egg_output]

def call_command(self,cmdname,**kw):
def call_command(self, cmdname, **kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault(dirname, self.bdist_dir)
kw.setdefault('skip_build', self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)

@@ -160,15 +164,16 @@ class bdist_egg(Command):
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
for (p, ext_name) in enumerate(ext_outputs):
filename, ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
ext_outputs[p] = ext_name.replace(os.sep, '/')

if to_compile:
cmd.byte_compile(to_compile)

@@ -177,12 +182,13 @@ class bdist_egg(Command):

# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
egg_info = os.path.join(archive_root, 'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
self.call_command('install_scripts', install_dir=script_dir,
no_ep=1)

self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")

@@ -200,10 +206,10 @@ class bdist_egg(Command):
os.unlink(native_libs)

write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
)

if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."

@@ -214,25 +220,25 @@ class bdist_egg(Command):

# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)

# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
getattr(self.distribution, 'dist_files', []).append(
('bdist_egg', get_python_version(), self.egg_output))

def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for base, dirs, files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
path = os.path.join(base, name)
log.debug("Deleting %s", path)
os.unlink(path)

def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
safe = getattr(self.distribution, 'zip_safe', None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")

@@ -240,7 +246,7 @@ class bdist_egg(Command):

def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
ep = epm.get('setuptools.installation', {}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.

@@ -268,7 +274,6 @@ class bdist_egg(Command):
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'

) % locals()

if not self.dry_run:

@@ -283,7 +288,7 @@ class bdist_egg(Command):
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info,'')
prefix = os.path.join(norm_egg_info, '')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])

@@ -296,23 +301,24 @@ class bdist_egg(Command):
all_outputs = []
ext_outputs = []

paths = {self.bdist_dir:''}
paths = {self.bdist_dir: ''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
all_outputs.append(paths[base] + filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
paths[os.path.join(base, filename)] = (paths[base] +
filename + '/')

if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
if isinstance(ext, Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
if os.path.exists(os.path.join(self.bdist_dir, filename)):
ext_outputs.append(filename)

return all_outputs, ext_outputs

@@ -324,19 +330,21 @@ NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = next(walker)
base, dirs, files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
yield base, dirs, files
for bdf in walker:
yield bdf


def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
for flag, fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
return flag
if not can_scan(): return False
if not can_scan():
return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:

@@ -347,36 +355,39 @@ def analyze_egg(egg_dir, stubs):
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe


def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
for flag, fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
f = open(fn,'wt')
elif safe is not None and bool(safe) == flag:
f = open(fn, 'wt')
f.write('\n')
f.close()


safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}


def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""

filename = os.path.join(base,name)
filename = os.path.join(base, name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
if sys.version_info < (3, 3):
skip = 8 # skip magic & date
skip = 8 # skip magic & date
else:
skip = 12 # skip magic & date & file size
f = open(filename,'rb')
f = open(filename, 'rb')
f.read(skip)
code = marshal.load(f)
f.close()

@@ -396,21 +407,24 @@ def scan_module(egg_dir, base, name, stubs):
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
if sys.version[:3] == "2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe


def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for name in code.co_names:
yield name
for const in code.co_consts:
if isinstance(const,basestring):
if isinstance(const, basestring):
yield const
elif isinstance(const,CodeType):
elif isinstance(const, CodeType):
for name in iter_symbols(const):
yield name


def can_scan():
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.

@@ -426,8 +440,9 @@ INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]


def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'):
mode='w'):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed

@@ -435,6 +450,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile

mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

@@ -442,13 +458,14 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
p = path[len(base_dir) + 1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)

if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
# avoid 2.3 zipimport bug when 64 bits
compress = (sys.version >= "2.4")

compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
@@ -1,5 +1,6 @@
import distutils.command.bdist_rpm as orig


class bdist_rpm(orig.bdist_rpm):
"""
Override the default bdist_rpm behavior to do the following:

@@ -19,7 +20,7 @@ class bdist_rpm(orig.bdist_rpm):

def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
rpmversion = version.replace('-', '_')
spec = orig.bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
@@ -1,5 +1,6 @@
import distutils.command.bdist_wininst as orig


class bdist_wininst(orig.bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
@@ -1,26 +1,29 @@
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
from distutils.errors import DistutilsError
from distutils import log
import os
import sys

from setuptools.extension import Library

try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext

import os
import sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
try:
# Python 2.7 or >=3.2
from sysconfig import _CONFIG_VARS
except ImportError:
from distutils.sysconfig import get_config_var

get_config_var("LDSHARED")  # make sure _config_vars is initialized
del get_config_var
from distutils.sysconfig import _config_vars as _CONFIG_VARS
from distutils import log
from distutils.errors import DistutilsError

have_rtld = False
use_stubs = False

@@ -31,11 +34,13 @@ if sys.platform == "darwin":
elif os.name != 'nt':
try:
from dl import RTLD_NOW

have_rtld = True
use_stubs = True
except ImportError:
pass


def if_dl(s):
if have_rtld:
return s

@@ -59,8 +64,9 @@ class build_ext(_build_ext):
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
dest_filename = os.path.join(package_dir,
os.path.basename(filename))
src_filename = os.path.join(self.build_lib, filename)

# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are

@@ -72,7 +78,8 @@ class build_ext(_build_ext):
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)

if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
if _build_ext is not _du_build_ext and not hasattr(_build_ext,
'pyrex_sources'):
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing

@@ -81,15 +88,15 @@ class build_ext(_build_ext):
return _du_build_ext.swig_sources(self, sources, *otherargs)

def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
filename = _build_ext.get_ext_filename(self, fullname)
if fullname in self.ext_map:
ext = self.ext_map[fullname]
if isinstance(ext,Library):
if isinstance(ext, Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
return self.shlib_compiler.library_filename(fn, libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
d, fn = os.path.split(filename)
return os.path.join(d, 'dl-' + fn)
return filename

def initialize_options(self):

@@ -103,7 +110,7 @@ class build_ext(_build_ext):
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext, Library)]
if isinstance(ext, Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:

@@ -118,9 +125,10 @@ class build_ext(_build_ext):

ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
ext._needs_stub = ltd and use_stubs and not isinstance(ext,
Library)
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:

@@ -134,7 +142,8 @@ class build_ext(_build_ext):
tmp = _CONFIG_VARS.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_CONFIG_VARS['LDSHARED'] = (
"gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
_CONFIG_VARS['CCSHARED'] = " -dynamiclib"
_CONFIG_VARS['SO'] = ".dylib"
customize_compiler(compiler)

@@ -148,7 +157,7 @@ class build_ext(_build_ext):
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
for (name, value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:

@@ -166,16 +175,16 @@ class build_ext(_build_ext):
compiler.link_shared_object = link_shared_object.__get__(compiler)

def get_export_symbols(self, ext):
if isinstance(ext,Library):
if isinstance(ext, Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
return _build_ext.get_export_symbols(self, ext)

def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
if isinstance(ext, Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
_build_ext.build_extension(self, ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext

@@ -189,9 +198,10 @@ class build_ext(_build_ext):
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
if pkg + libname in libnames:
return True
return False

def get_outputs(self):

@@ -200,26 +210,29 @@ class build_ext(_build_ext):
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
outputs.append(base + '.py')
outputs.append(base + '.pyc')
if optimize:
outputs.append(base+'.pyo')
outputs.append(base + '.pyo')
return outputs

def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
log.info("writing stub loader for %s to %s", ext._full_name,
output_dir)
stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
'.py')
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
raise DistutilsError(stub_file + " already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f = open(stub_file, 'w')
f.write(
'\n'.join([
"def __bootstrap__():",
"   global __bootstrap__, __file__, __loader__",
"   import sys, os, pkg_resources, imp"+if_dl(", dl"),
"   __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
"   import sys, os, pkg_resources, imp" + if_dl(", dl"),
"   __file__ = pkg_resources.resource_filename"
"(__name__,%r)"
% os.path.basename(ext._file_name),
"   del __bootstrap__",
"   if '__loader__' in globals():",
"       del __loader__",

@@ -233,12 +246,13 @@ class build_ext(_build_ext):
if_dl("   sys.setdlopenflags(old_flags)"),
"   os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
""  # terminal \n
])
)
f.close()
if compile:
from distutils.util import byte_compile

byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize

@@ -249,13 +263,14 @@ class build_ext(_build_ext):
os.unlink(stub_file)


if use_stubs or os.name=='nt':
if use_stubs or os.name == 'nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,

@@ -266,18 +281,19 @@ else:
# Build static libraries everywhere else
libtype = 'static'

def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
# or warn/abort here if set, or something...
# libraries=None, library_dirs=None, runtime_library_dirs=None,
# export_symbols=None, extra_preargs=None, extra_postargs=None,
# build_temp=None

assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
assert output_dir is None  # distutils build_ext doesn't pass this
output_dir, filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
@@ -1,10 +1,10 @@
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import sys
import fnmatch
import textwrap
import distutils.command.build_py as orig
from distutils.util import convert_path
from glob import glob

try:
from setuptools.lib2to3_ex import Mixin2to3

@@ -13,6 +13,7 @@ except ImportError:
def run_2to3(self, files, doctests=True):
"do nothing"


class build_py(orig.build_py, Mixin2to3):
"""Enhanced 'build_py' command that includes data files with packages

@@ -22,11 +23,14 @@ class build_py(orig.build_py, Mixin2to3):
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""

def finalize_options(self):
orig.build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
self.exclude_package_data = (self.distribution.exclude_package_data or
{})
if 'data_files' in self.__dict__:
del self.__dict__['data_files']
self.__updated_files = []
self.__doctests_2to3 = []

@@ -51,13 +55,14 @@ class build_py(orig.build_py, Mixin2to3):
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

def __getattr__(self, attr):
if attr=='data_files': # lazily compute data files
if attr == 'data_files':  # lazily compute data files
self.data_files = files = self._get_data_files()
return files
return orig.build_py.__getattr__(self,attr)
return orig.build_py.__getattr__(self, attr)

def build_module(self, module, module_file, package):
outfile, copied = orig.build_py.build_module(self, module, module_file, package)
outfile, copied = orig.build_py.build_module(self, module, module_file,
package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied

@@ -74,12 +79,12 @@ class build_py(orig.build_py, Mixin2to3):
build_dir = os.path.join(*([self.build_lib] + package.split('.')))

# Length of path to strip from found files
plen = len(src_dir)+1
plen = len(src_dir) + 1

# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
]
data.append((package, src_dir, build_dir, filenames))
return data

@@ -102,7 +107,8 @@ class build_py(orig.build_py, Mixin2to3):
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if copied and srcfile in self.distribution.convert_2to3_doctests:
if (copied and
srcfile in self.distribution.convert_2to3_doctests):
self.__doctests_2to3.append(outf)

def analyze_manifest(self):

@@ -117,21 +123,22 @@ class build_py(orig.build_py, Mixin2to3):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
d,f = os.path.split(assert_relative(path))
d, f = os.path.split(assert_relative(path))
prev = None
oldf = f
while d and d!=prev and d not in src_dirs:
while d and d != prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
if path.endswith('.py') and f==oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d],[]).append(path)
if path.endswith('.py') and f == oldf:
continue  # it's a module, not data
mf.setdefault(src_dirs[d], []).append(path)

def get_data_files(self): pass # kludge 2.4 for lazy computation
def get_data_files(self):
pass  # kludge 2.4 for lazy computation

if sys.version<"2.4": # Python 2.4 already has this code
if sys.version < "2.4":  # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory

@@ -142,9 +149,9 @@ class build_py(orig.build_py, Mixin2to3):
"""
return orig.build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames
]
]

def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""

@@ -160,25 +167,26 @@ class build_py(orig.build_py, Mixin2to3):
return init_py

for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
if pkg == package or pkg.startswith(package + '.'):
break
else:
return init_py

f = open(init_py,'rbU')
f = open(init_py, 'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils.errors import DistutilsError

raise DistutilsError(
"Namespace package problem: %s is a namespace package, but its\n"
"__init__.py does not call declare_namespace()! Please fix it.\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n" % (package,)
"Namespace package problem: %s is a namespace package, but "
"its\n__init__.py does not call declare_namespace()! Please "
'fix it.\n(See the setuptools manual under '
'"Namespace Packages" for details.)\n"' % (package,)
)
f.close()
return init_py

def initialize_options(self):
self.packages_checked={}
self.packages_checked = {}
orig.build_py.initialize_options(self)

def get_package_dir(self, package):

@@ -202,7 +210,7 @@ class build_py(orig.build_py, Mixin2to3):
seen = {}
return [
f for f in files if f not in bad
and f not in seen and seen.setdefault(f,1) # ditch dupes
and f not in seen and seen.setdefault(f, 1)  # ditch dupes
]

@@ -210,6 +218,7 @@ def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError

msg = textwrap.dedent("""
Error: setup script specifies an absolute path:
@@ -1,9 +1,14 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
import os
import glob

from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
from setuptools.compat import PY3
import setuptools


class develop(easy_install):
"""Set up package for development"""

@@ -32,59 +37,56 @@ class develop(easy_install):
self.egg_path = None
easy_install.initialize_options(self)
self.setup_path = None
self.always_copy_from = '.' # always copy eggs installed in curdir

self.always_copy_from = '.'  # always copy eggs installed in curdir

def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
raise DistutilsError(
"Please rename %r to %r before using 'develop'"
% (ei.egg_info, ei.broken_egg_info)
)
template = "Please rename %r to %r before using 'develop'"
args = ei.egg_info, ei.broken_egg_info
raise DistutilsError(template % args)
self.args = [ei.egg_name]

easy_install.finalize_options(self)
self.expand_basedirs()
self.expand_dirs()
# pick up setup-dir .egg files only: no .egg-info
self.package_index.scan(glob.glob('*.egg'))

self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
self.egg_link = os.path.join(self.install_dir, ei.egg_name +
'.egg-link')
self.egg_base = ei.egg_base
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)

target = normalize_path(self.egg_base)
if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
egg_path = normalize_path(os.path.join(self.install_dir,
self.egg_path))
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
" directory to "+target
)
" directory to " + target
)

# Make a distribution for the package's source
self.dist = Distribution(
target,
PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name = ei.egg_name
project_name=ei.egg_name
)

p = self.egg_base.replace(os.sep,'/')
if p!= os.curdir:
p = '../' * (p.count('/')+1)
p = self.egg_base.replace(os.sep, '/')
if p != os.curdir:
p = '../' * (p.count('/') + 1)
self.setup_path = p
p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
if p != normalize_path(os.curdir):
if p != normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
" installation directory", p, normalize_path(os.curdir))

def install_for_development(self):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
if PY3 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:

# Ensure metadata is up-to-date

@@ -99,12 +101,13 @@ class develop(easy_install):

self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')

# Fixup egg-link and easy-install.pth
ei_cmd = self.get_finalized_command("egg_info")
self.egg_path = build_path
self.dist.location = build_path
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX
# XXX
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')

@@ -112,7 +115,7 @@ class develop(easy_install):
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')

self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)

@@ -121,21 +124,21 @@ class develop(easy_install):
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
f = open(self.egg_link,"w")
f = open(self.egg_link, "w")
f.write(self.egg_path + "\n" + self.setup_path)
f.close()
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist, not self.no_deps)

def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
if contents not in ([self.egg_path],
[self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:

@@ -149,7 +152,7 @@ class develop(easy_install):
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self,dist)
return easy_install.install_egg_scripts(self, dist)

# create wrapper scripts in the script dir, pointing to dist.scripts

@@ -160,8 +163,7 @@ class develop(easy_install):
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
f = open(script_path,'rU')
f = open(script_path, 'rU')
script_text = f.read()
f.close()
self.install_script(dist, script_name, script_text, script_path)
(File diff suppressed because it is too large.)
@ -2,21 +2,23 @@
|
|||
|
||||
Create a distribution's .egg-info directory and contents"""
|
||||
|
||||
from distutils.filelist import FileList as _FileList
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.errors
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from setuptools import Command
|
||||
import distutils.errors
|
||||
from distutils import log
|
||||
from setuptools.command.sdist import sdist
|
||||
from setuptools.compat import basestring
|
||||
from setuptools.compat import basestring, PY3, StringIO
|
||||
from setuptools import svn_utils
|
||||
from distutils.util import convert_path
|
||||
from distutils.filelist import FileList as _FileList
|
||||
from pkg_resources import (parse_requirements, safe_name, parse_version,
|
||||
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
||||
from setuptools.command.sdist import walk_revctrl
|
||||
from pkg_resources import (
|
||||
parse_requirements, safe_name, parse_version,
|
||||
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
||||
import setuptools.unicode_utils as unicode_utils
|
||||
|
||||
|
||||
class egg_info(Command):
|
||||
|
|
@ -26,11 +28,11 @@ class egg_info(Command):
|
|||
('egg-base=', 'e', "directory containing .egg-info directories"
|
||||
" (default: top of the source tree)"),
|
||||
('tag-svn-revision', 'r',
|
||||
"Add subversion revision ID to version number"),
|
||||
"Add subversion revision ID to version number"),
|
||||
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
|
||||
('tag-build=', 'b', "Specify explicit tag to add to version number"),
|
||||
('no-svn-revision', 'R',
|
||||
"Don't add subversion revision ID [default]"),
|
||||
"Don't add subversion revision ID [default]"),
|
||||
('no-date', 'D', "Don't include date stamp [default]"),
|
||||
]
|
||||
|
||||
|
|
@ -51,6 +53,7 @@ class egg_info(Command):
|
|||
|
||||
def save_version_info(self, filename):
|
||||
from setuptools.command.setopt import edit_config
|
||||
|
||||
values = dict(
|
||||
egg_info=dict(
|
||||
tag_svn_revision=0,
|
||||
|
|
@ -67,23 +70,25 @@ class egg_info(Command):
|
|||
|
||||
try:
|
||||
list(
|
||||
parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
|
||||
parse_requirements('%s==%s' % (self.egg_name,
|
||||
self.egg_version))
|
||||
)
|
||||
except ValueError:
|
||||
raise distutils.errors.DistutilsOptionError(
|
||||
"Invalid distribution name or version syntax: %s-%s" %
|
||||
(self.egg_name,self.egg_version)
|
||||
(self.egg_name, self.egg_version)
|
||||
)
|
||||
|
||||
if self.egg_base is None:
|
||||
dirs = self.distribution.package_dir
|
||||
self.egg_base = (dirs or {}).get('',os.curdir)
|
||||
self.egg_base = (dirs or {}).get('', os.curdir)
|
||||
|
||||
self.ensure_dirname('egg_base')
|
||||
self.egg_info = to_filename(self.egg_name)+'.egg-info'
|
||||
self.egg_info = to_filename(self.egg_name) + '.egg-info'
|
||||
if self.egg_base != os.curdir:
|
||||
self.egg_info = os.path.join(self.egg_base, self.egg_info)
|
||||
if '-' in self.egg_name: self.check_broken_egg_info()
|
||||
if '-' in self.egg_name:
|
||||
self.check_broken_egg_info()
|
||||
|
||||
# Set package version for the benefit of dumber commands
|
||||
# (e.g. sdist, bdist_wininst, etc.)
|
||||
|
|
@ -95,7 +100,7 @@ class egg_info(Command):
|
|||
# to the version info
|
||||
#
|
||||
pd = self.distribution._patched_dist
|
||||
if pd is not None and pd.key==self.egg_name.lower():
|
||||
if pd is not None and pd.key == self.egg_name.lower():
|
||||
pd._version = self.egg_version
|
||||
pd._parsed_version = parse_version(self.egg_version)
|
||||
self.distribution._patched_dist = None
|
||||
|
|
@ -127,7 +132,7 @@ class egg_info(Command):
|
|||
to the file.
|
||||
"""
|
||||
log.info("writing %s to %s", what, filename)
|
||||
if sys.version_info >= (3,):
|
||||
if PY3:
|
||||
data = data.encode("utf-8")
|
||||
if not self.dry_run:
|
||||
f = open(filename, 'wb')
|
||||
|
|
@ -153,7 +158,7 @@ class egg_info(Command):
|
|||
installer = self.distribution.fetch_build_egg
|
||||
for ep in iter_entry_points('egg_info.writers'):
|
||||
writer = ep.load(installer=installer)
|
||||
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
|
||||
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
|
||||
|
||||
# Get rid of native_libs.txt if it was put there by older bdist_egg
|
||||
nl = os.path.join(self.egg_info, "native_libs.txt")
|
||||
|
|
@ -165,12 +170,14 @@ class egg_info(Command):
|
|||
def tags(self):
|
||||
version = ''
|
||||
if self.tag_build:
|
||||
version+=self.tag_build
|
||||
if self.tag_svn_revision and (
|
||||
os.path.exists('.svn') or os.path.exists('PKG-INFO')
|
||||
): version += '-r%s' % self.get_svn_revision()
|
||||
version += self.tag_build
|
||||
if self.tag_svn_revision:
|
||||
rev = self.get_svn_revision()
|
||||
if rev: # is 0 if it's not an svn working copy
|
||||
version += '-r%s' % rev
|
||||
if self.tag_date:
|
||||
import time
|
||||
|
||||
version += time.strftime("-%Y%m%d")
|
||||
return version
|
||||
|
||||
|
|
@ -180,53 +187,77 @@ class egg_info(Command):
|
|||
|
||||
def find_sources(self):
|
||||
"""Generate SOURCES.txt manifest file"""
|
||||
manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
|
||||
manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
|
||||
mm = manifest_maker(self.distribution)
|
||||
mm.manifest = manifest_filename
|
||||
mm.run()
|
||||
self.filelist = mm.filelist
|
||||
|
||||
def check_broken_egg_info(self):
|
||||
bei = self.egg_name+'.egg-info'
|
||||
bei = self.egg_name + '.egg-info'
|
||||
if self.egg_base != os.curdir:
|
||||
bei = os.path.join(self.egg_base, bei)
|
||||
if os.path.exists(bei):
|
||||
log.warn(
|
||||
"-"*78+'\n'
|
||||
"-" * 78 + '\n'
|
||||
"Note: Your current .egg-info directory has a '-' in its name;"
|
||||
'\nthis will not work correctly with "setup.py develop".\n\n'
|
||||
'Please rename %s to %s to correct this problem.\n'+'-'*78,
|
||||
'Please rename %s to %s to correct this problem.\n' + '-' * 78,
|
||||
bei, self.egg_info
|
||||
)
|
||||
self.broken_egg_info = self.egg_info
|
||||
self.egg_info = bei # make it work for now
|
||||
self.egg_info = bei # make it work for now
|
||||
|
||||
|
||||
class FileList(_FileList):
|
||||
"""File list that accepts only existing, platform-independent paths"""
|
||||
|
||||
def append(self, item):
|
||||
if item.endswith('\r'): # Fix older sdists built on Windows
|
||||
if item.endswith('\r'): # Fix older sdists built on Windows
|
||||
item = item[:-1]
|
||||
path = convert_path(item)
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
try:
|
||||
if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
|
||||
self.files.append(path)
|
||||
except UnicodeEncodeError:
|
||||
# Accept UTF-8 filenames even if LANG=C
|
||||
if os.path.exists(path.encode('utf-8')):
|
||||
self.files.append(path)
|
||||
else:
|
||||
log.warn("'%s' not %s encodable -- skipping", path,
|
||||
sys.getfilesystemencoding())
|
||||
else:
|
||||
if os.path.exists(path):
|
||||
self.files.append(path)
|
||||
if self._safe_path(path):
|
||||
self.files.append(path)
|
||||
|
||||
def extend(self, paths):
|
||||
self.files.extend(filter(self._safe_path, paths))
|
||||
|
||||
def _repair(self):
|
||||
"""
|
||||
Replace self.files with only safe paths
|
||||
|
||||
Because some owners of FileList manipulate the underlying
|
||||
``files`` attribute directly, this method must be called to
|
||||
repair those paths.
|
||||
"""
|
||||
self.files = list(filter(self._safe_path, self.files))
|
||||
|
||||
def _safe_path(self, path):
|
||||
enc_warn = "'%s' not %s encodable -- skipping"
|
||||
|
||||
# To avoid accidental trans-codings errors, first to unicode
|
||||
u_path = unicode_utils.filesys_decode(path)
|
||||
if u_path is None:
|
||||
log.warn("'%s' in unexpected encoding -- skipping" % path)
|
||||
return False
|
||||
|
||||
# Must ensure utf-8 encodability
|
||||
utf8_path = unicode_utils.try_encode(u_path, "utf-8")
|
||||
if utf8_path is None:
|
||||
log.warn(enc_warn, path, 'utf-8')
|
||||
return False
|
||||
|
||||
try:
|
||||
# accept is either way checks out
|
||||
if os.path.exists(u_path) or os.path.exists(utf8_path):
|
||||
return True
|
||||
# this will catch any encode errors decoding u_path
|
||||
except UnicodeEncodeError:
|
||||
log.warn(enc_warn, path, sys.getfilesystemencoding())
class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options(self):

@@ -241,7 +272,7 @@ class manifest_maker(sdist):
def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest() # it must exist so it'll get in the list
self.write_manifest() # it must exist so it'll get in the list
self.filelist.findall()
self.add_defaults()
if os.path.exists(self.template):

@@ -251,30 +282,23 @@ class manifest_maker(sdist):
self.filelist.remove_duplicates()
self.write_manifest()
def _manifest_normalize(self, path):
path = unicode_utils.filesys_decode(path)
return path.replace(os.sep, '/')
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
"""
"""
Write the file list in 'self.filelist' to the manifest file
named by 'self.manifest'.
"""
# The manifest must be UTF-8 encodable. See #303.
if sys.version_info >= (3,):
files = []
for file in self.filelist.files:
try:
file.encode("utf-8")
except UnicodeEncodeError:
log.warn("'%s' not UTF-8 encodable -- skipping" % file)
else:
files.append(file)
self.filelist.files = files
self.filelist._repair()
files = self.filelist.files
if os.sep!='/':
files = [f.replace(os.sep,'/') for f in files]
self.execute(write_file, (self.manifest, files),
"writing manifest file '%s'" % self.manifest)
# Now _repairs should encodability, but not unicode
files = [self._manifest_normalize(f) for f in self.filelist.files]
msg = "writing manifest file '%s'" % self.manifest
self.execute(write_file, (self.manifest, files), msg)
def warn(self, msg): # suppress missing-file warnings from sdist
def warn(self, msg): # suppress missing-file warnings from sdist
if not msg.startswith("standard file not found:"):
sdist.warn(self, msg)

@@ -296,7 +320,8 @@ class manifest_maker(sdist):
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
is_regex=1)
def write_file(filename, contents):

@@ -304,11 +329,13 @@ def write_file(filename, contents):
sequence of strings without line terminators) to it.
"""
contents = "\n".join(contents)
if sys.version_info >= (3,):
contents = contents.encode("utf-8")
f = open(filename, "wb") # always write POSIX-style manifest
f.write(contents)
f.close()
# assuming the contents has been vetted for utf-8 encoding
contents = contents.encode("utf-8")
with open(filename, "wb") as f: # always write POSIX-style manifest
f.write(contents)
def write_pkg_info(cmd, basename, filename):
log.info("writing %s", filename)

@@ -323,10 +350,12 @@ def write_pkg_info(cmd, basename, filename):
finally:
metadata.name, metadata.version = oldname, oldver
safe = getattr(cmd.distribution,'zip_safe',None)
safe = getattr(cmd.distribution, 'zip_safe', None)
from setuptools.command import bdist_egg
bdist_egg.write_safety_flag(cmd.egg_info, safe)
def warn_depends_obsolete(cmd, basename, filename):
if os.path.exists(filename):
log.warn(

@@ -335,55 +364,69 @@ def warn_depends_obsolete(cmd, basename, filename):
)
def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ())
append_cr = lambda line: line + '\n'
lines = map(append_cr, lines)
stream.writelines(lines)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
data = ['\n'.join(yield_lines(dist.install_requires or ()))]
for extra,reqs in (dist.extras_require or {}).items():
data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
cmd.write_or_delete_file("requirements", filename, ''.join(data))
data = StringIO()
_write_requirements(data, dist.install_requires)
extras_require = dist.extras_require or {}
for extra in sorted(extras_require):
data.write('\n[{extra}]\n'.format(**vars()))
_write_requirements(data, extras_require[extra])
cmd.write_or_delete_file("requirements", filename, data.getvalue())
def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys(
[
k.split('.',1)[0]
k.split('.', 1)[0]
for k in cmd.distribution.iter_distribution_names()
]
)
cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
cmd.write_file("top-level names", filename, '\n'.join(pkgs) + '\n')
def overwrite_arg(cmd, basename, filename):
write_arg(cmd, basename, filename, True)
def write_arg(cmd, basename, filename, force=False):
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value)+'\n'
value = '\n'.join(value) + '\n'
cmd.write_or_delete_file(argname, filename, value, force)
def write_entries(cmd, basename, filename):
ep = cmd.distribution.entry_points
if isinstance(ep,basestring) or ep is None:
if isinstance(ep, basestring) or ep is None:
data = ep
elif ep is not None:
data = []
for section, contents in sorted(ep.items()):
if not isinstance(contents,basestring):
if not isinstance(contents, basestring):
contents = EntryPoint.parse_group(section, contents)
contents = '\n'.join(sorted(map(str,contents.values())))
data.append('[%s]\n%s\n\n' % (section,contents))
contents = '\n'.join(sorted(map(str, contents.values())))
data.append('[%s]\n%s\n\n' % (section, contents))
data = ''.join(data)
cmd.write_or_delete_file('entry points', filename, data, True)
def get_pkg_info_revision():
# See if we can get a -r### off of PKG-INFO, in case this is an sdist of
# a subversion revision
#
if os.path.exists('PKG-INFO'):
f = open('PKG-INFO','rU')
f = open('PKG-INFO', 'rU')
for line in f:
match = re.match(r"Version:.*-r(\d+)\s*$", line)
if match:
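The rewritten write_requirements above builds requires.txt through a StringIO buffer and emits extras in sorted order. A minimal standalone sketch of that flow for Python 3, with pkg_resources.yield_lines replaced by a simple strip-and-filter and with made-up requirement names:

from io import StringIO

def _write_requirements(stream, reqs):
    # one requirement per line, each newline-terminated (mirrors the new helper)
    for line in (r.strip() for r in (reqs or []) if r.strip()):
        stream.write(line + '\n')

# hypothetical stand-ins for dist.install_requires / dist.extras_require
install_requires = ['tornado>=4.0', 'requests==2.3.0']
extras_require = {'ssl': ['pyOpenSSL'], 'docs': ['sphinx']}

data = StringIO()
_write_requirements(data, install_requires)
for extra in sorted(extras_require):        # extras are written in sorted order
    data.write('\n[%s]\n' % extra)
    _write_requirements(data, extras_require[extra])
print(data.getvalue())                      # what write_or_delete_file would receive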
@@ -1,22 +1,24 @@
import setuptools
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig
from distutils.errors import DistutilsArgError
import setuptools
# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://bitbucket.org/pypa/setuptools/issue/199/
# now. See https://bitbucket.org/pypa/setuptools/issue/199/
_install = orig.install
class install(orig.install):
"""Use easy_install to install the package, w/dependencies"""
user_options = orig.install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
"used by system package builders to create 'flat' eggs"),
]
boolean_options = orig.install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',

@@ -115,7 +117,9 @@ class install(orig.install):
cmd.run()
setuptools.bootstrap_install_from = None
# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = [
cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc
] + install.new_commands
install.sub_commands = (
[cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
install.new_commands
)
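The tail of this file only re-wraps the same list comprehension in parentheses; the filtering logic is unchanged. A toy illustration with invented command names, showing how entries listed in _nc are dropped before the new commands are appended:

# invented stand-ins for orig.install.sub_commands, install._nc and install.new_commands
orig_sub_commands = [('install_lib', None), ('install_headers', None), ('install_data', None)]
_nc = {'install_headers'}
new_commands = [('install_custom', None)]

sub_commands = (
    [cmd for cmd in orig_sub_commands if cmd[0] not in _nc] +
    new_commands
)
print(sub_commands)   # install_headers filtered out, install_custom appended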
@@ -1,7 +1,10 @@
from distutils import log, dir_util
import os
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, pkg_resources
import pkg_resources
class install_egg_info(Command):
"""Install an .egg-info directory for the package"""

@@ -16,26 +19,26 @@ class install_egg_info(Command):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',('install_dir','install_dir'))
self.set_undefined_options('install_lib',
('install_dir', 'install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name()+'.egg-info'
).egg_name() + '.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
def run(self):
self.run_command('egg_info')
target = self.target
if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink,(self.target,),"Removing "+self.target)
self.execute(os.unlink, (self.target,), "Removing " + self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(self.copytree, (),
"Copying %s to %s" % (self.source, self.target)
self.execute(
self.copytree, (), "Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()

@@ -44,82 +47,70 @@ class install_egg_info(Command):
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src,dst):
def skimmer(src, dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/','CVS/':
if src.startswith(skip) or '/'+skip in src:
for skip in '.svn/', 'CVS/':
if src.startswith(skip) or '/' + skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp: return
filename,ext = os.path.splitext(self.target)
filename += '-nspkg.pth'; self.outputs.append(filename)
log.info("Installing %s",filename)
if not self.dry_run:
f = open(filename,'wt')
for pkg in nsp:
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
trailer = '\n'
if '.' in pkg:
trailer = (
"; m and setattr(sys.modules[%r], %r, m)\n"
% ('.'.join(pth[:-1]), pth[-1])
)
f.write(
"import sys,types,os; "
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
"*%(pth)r); "
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
"m = not ie and "
"sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
"(p not in mp) and mp.append(p)%(trailer)s"
% locals()
)
f.close()
if not nsp:
return
filename, ext = os.path.splitext(self.target)
filename += '-nspkg.pth'
self.outputs.append(filename)
log.info("Installing %s", filename)
lines = map(self._gen_nspkg_line, nsp)
if self.dry_run:
# always generate the lines, even in dry run
list(lines)
return
with open(filename, 'wt') as f:
f.writelines(lines)
_nspkg_tmpl = (
"import sys, types, os",
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
"ie = os.path.exists(os.path.join(p,'__init__.py'))",
"m = not ie and "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
"lines for the namespace installer"
_nspkg_tmpl_multi = (
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
)
"additional line(s) when a parent package is indicated"
@classmethod
def _gen_nspkg_line(cls, pkg):
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
tmpl_lines = cls._nspkg_tmpl
parent, sep, child = pkg.rpartition('.')
if parent:
tmpl_lines += cls._nspkg_tmpl_multi
return ';'.join(tmpl_lines) % locals() + '\n'
def _get_all_ns_packages(self):
nsp = {}
"""Return sorted list of all package namespaces"""
nsp = set()
for pkg in self.distribution.namespace_packages or []:
pkg = pkg.split('.')
while pkg:
nsp['.'.join(pkg)] = 1
nsp.add('.'.join(pkg))
pkg.pop()
nsp=list(nsp)
nsp.sort() # set up shorter names first
return nsp
return sorted(nsp)
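The old inline f.write() block above becomes the _nspkg_tmpl / _gen_nspkg_line pair. A standalone re-creation of that join (module-level names instead of the classmethod) shows what one -nspkg.pth line looks like for a flat and a dotted namespace package; the package names are examples only:

# re-creation of the template join from the diff above, outside the command class
_nspkg_tmpl = (
    "import sys, types, os",
    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
    "ie = os.path.exists(os.path.join(p,'__init__.py'))",
    "m = not ie and "
    "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
    "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
    "(p not in mp) and mp.append(p)",
)
_nspkg_tmpl_multi = (
    'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
)

def gen_nspkg_line(pkg):
    pkg = str(pkg)
    pth = tuple(pkg.split('.'))
    tmpl_lines = _nspkg_tmpl
    parent, sep, child = pkg.rpartition('.')
    if parent:
        tmpl_lines += _nspkg_tmpl_multi   # nested namespace gets the extra setattr clause
    return ';'.join(tmpl_lines) % locals() + '\n'

print(gen_nspkg_line('example_ns'))        # single-level namespace package
print(gen_nspkg_line('example_ns.sub'))    # dotted namespace package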
@@ -1,6 +1,7 @@
import distutils.command.install_lib as orig
import os
class install_lib(orig.install_lib):
"""Don't add compiled flags to filenames of non-Python files"""

@@ -15,20 +16,20 @@ class install_lib(orig.install_lib):
exclude = {}
nsp = self.distribution.namespace_packages
svem = (nsp and self.get_finalized_command('install')
.single_version_externally_managed)
.single_version_externally_managed)
if svem:
for pkg in nsp:
parts = pkg.split('.')
while parts:
pkgdir = os.path.join(self.install_dir, *parts)
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
exclude[os.path.join(pkgdir,f)] = 1
exclude[os.path.join(pkgdir, f)] = 1
parts.pop()
return exclude
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()

@@ -45,7 +46,8 @@ class install_lib(orig.install_lib):
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",dst)
log.warn("Skipping installation of %s (namespace package)",
dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
@@ -1,7 +1,9 @@
import distutils.command.install_scripts as orig
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
import distutils.command.install_scripts as orig
import os
from pkg_resources import Distribution, PathMetadata, ensure_directory
class install_scripts(orig.install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""

@@ -29,7 +31,7 @@ class install_scripts(orig.install_scripts):
ei_cmd.egg_name, ei_cmd.egg_version,
)
bs_cmd = self.get_finalized_command('build_scripts')
executable = getattr(bs_cmd,'executable',sys_executable)
executable = getattr(bs_cmd, 'executable', sys_executable)
is_wininst = getattr(
self.get_finalized_command("bdist_wininst"), '_is_running', False
)

@@ -39,6 +41,7 @@ class install_scripts(orig.install_scripts):
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
from setuptools.command.easy_install import chmod, current_umask
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)

@@ -46,7 +49,7 @@ class install_scripts(orig.install_scripts):
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f = open(target, "w" + mode)
f.write(contents)
f.close()
chmod(target, 0x1FF-mask) # 0777
chmod(target, 0o777 - mask)
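The permission change above is cosmetic: 0x1FF and 0o777 are the same value (511), so the resulting mode is identical. A quick check, assuming a typical umask of 0o022:

mask = 0o022                               # stand-in for current_umask()
assert 0x1FF == 0o777 == 511
assert 0x1FF - mask == 0o777 - mask == 0o755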
@@ -1,15 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%(name)s"
type="win32"/>
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%(name)s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>
@@ -1,5 +1,6 @@
import distutils.command.register as orig
class register(orig.register):
__doc__ = orig.register.__doc__
@@ -1,18 +1,20 @@
import os
from setuptools import Command
from setuptools.compat import basestring
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import os
from setuptools import Command
from setuptools.compat import basestring
class rotate(Command):
"""Delete older distributions"""
description = "delete older distributions, keeping N newest files"
user_options = [
('match=', 'm', "patterns to match (required)"),
('match=', 'm', "patterns to match (required)"),
('dist-dir=', 'd', "directory where the distributions are"),
('keep=', 'k', "number of matching distributions to keep"),
('keep=', 'k', "number of matching distributions to keep"),
]
boolean_options = []

@@ -38,21 +40,22 @@ class rotate(Command):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
def run(self):
self.run_command("egg_info")
from glob import glob
for pattern in self.match:
pattern = self.distribution.get_name()+'*'+pattern
files = glob(os.path.join(self.dist_dir,pattern))
files = [(os.path.getmtime(f),f) for f in files]
pattern = self.distribution.get_name() + '*' + pattern
files = glob(os.path.join(self.dist_dir, pattern))
files = [(os.path.getmtime(f), f) for f in files]
files.sort()
files.reverse()
log.info("%d file(s) matching %s", len(files), pattern)
files = files[self.keep:]
for (t,f) in files:
for (t, f) in files:
log.info("Deleting %s", f)
if not self.dry_run:
os.unlink(f)
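The run() loop above keeps the N newest matching distributions and unlinks the rest. A hypothetical standalone version of that loop; the directory, pattern, and keep count are made up, and dry_run=True so nothing is actually deleted:

import glob
import os

def rotate(dist_dir, pattern, keep, dry_run=True):
    files = glob.glob(os.path.join(dist_dir, pattern))
    files = [(os.path.getmtime(f), f) for f in files]
    files.sort()
    files.reverse()                    # newest first
    for mtime, f in files[keep:]:      # everything beyond the N newest
        print("Deleting", f)
        if not dry_run:
            os.unlink(f)

rotate('dist', 'example_pkg*.egg', keep=2)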
@@ -1,7 +1,6 @@
import distutils, os
from setuptools import Command
from setuptools.command.setopt import edit_config, option_base
class saveopts(option_base):
"""Save command-line options to a file"""

@@ -13,12 +12,11 @@ class saveopts(option_base):
for cmd in dist.command_options:
if cmd=='saveopts':
continue # don't save our own options!
if cmd == 'saveopts':
continue # don't save our own options!
for opt,(src,val) in dist.get_option_dict(cmd).items():
if src=="command line":
settings.setdefault(cmd,{})[opt] = val
for opt, (src, val) in dist.get_option_dict(cmd).items():
if src == "command line":
settings.setdefault(cmd, {})[opt] = val
edit_config(self.filename, settings, self.dry_run)
@@ -1,13 +1,14 @@
from glob import glob
from distutils.util import convert_path
from distutils import log
import distutils.command.sdist as orig
import os
import re
import sys
from glob import glob
import pkg_resources
import distutils.command.sdist as orig
from distutils.util import convert_path
from distutils import log
from setuptools import svn_utils
from setuptools.compat import PY3
import pkg_resources
READMES = ('README', 'README.rst', 'README.txt')

@@ -19,7 +20,7 @@ def walk_revctrl(dirname=''):
yield item
#TODO will need test case
# TODO will need test case
class re_finder(object):
"""
Finder that locates files based on entries in a file matched by a

@@ -32,7 +33,7 @@ class re_finder(object):
self.entries_path = convert_path(path)
def _finder(self, dirname, filename):
f = open(filename,'rU')
f = open(filename, 'rU')
try:
data = f.read()
finally:

@@ -50,12 +51,13 @@ class re_finder(object):
if not os.path.isfile(path):
# entries file doesn't exist
return
for path in self._finder(dirname,path):
for path in self._finder(dirname, path):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
for item in self.find(path):
yield item
__call__ = find

@@ -84,7 +86,7 @@ class sdist(orig.sdist):
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
]
negative_opt = {}

@@ -92,7 +94,7 @@ class sdist(orig.sdist):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
self.check_readme()
# Run sub commands

@@ -102,12 +104,13 @@ class sdist(orig.sdist):
# Call check_metadata only if no 'check' command
# (distutils <= 2.6)
import distutils.command
if 'check' not in distutils.command.__all__:
self.check_metadata()
self.make_distribution()
dist_files = getattr(self.distribution,'dist_files',[])
dist_files = getattr(self.distribution, 'dist_files', [])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:

@@ -123,13 +126,14 @@ class sdist(orig.sdist):
except:
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2,7,2)
or (3,0) <= sys.version_info < (3,1,4)
or (3,2) <= sys.version_info < (3,2,1)
sys.version_info < (2, 7, 2)
or (3, 0) <= sys.version_info < (3, 1, 4)
or (3, 2) <= sys.version_info < (3, 2, 1)
)
if has_leaky_handle:
read_template = __read_template_hack

@@ -193,7 +197,8 @@ class sdist(orig.sdist):
return
else:
self.warn(
"standard file not found: should have one of " +', '.join(READMES)
"standard file not found: should have one of " +
', '.join(READMES)
)
def make_release_tree(self, base_dir, files):

@@ -201,7 +206,7 @@ class sdist(orig.sdist):
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os,'link') and os.path.exists(dest):
if hasattr(os, 'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)

@@ -219,7 +224,8 @@ class sdist(orig.sdist):
first_line = fp.readline()
finally:
fp.close()
return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
return (first_line !=
'# file GENERATED by distutils, do NOT edit\n'.encode())
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to

@@ -230,7 +236,7 @@ class sdist(orig.sdist):
manifest = open(self.manifest, 'rbU')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if sys.version_info >= (3,):
if PY3:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
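The has_leaky_handle expression above only changes whitespace; restated on its own it is just a version gate deciding whether the __read_template_hack override is installed:

import sys

has_leaky_handle = (
    sys.version_info < (2, 7, 2)
    or (3, 0) <= sys.version_info < (3, 1, 4)
    or (3, 2) <= sys.version_info < (3, 2, 1)
)
print("override read_template:", has_leaky_handle)   # False on modern interpreters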
@@ -1,9 +1,11 @@
import os
import distutils
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import distutils
import os
from setuptools import Command
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']

@@ -13,19 +15,20 @@ def config_file(kind="local"):
`kind` must be one of "local", "global", or "user"
"""
if kind=='local':
if kind == 'local':
return 'setup.cfg'
if kind=='global':
if kind == 'global':
return os.path.join(
os.path.dirname(distutils.__file__),'distutils.cfg'
os.path.dirname(distutils.__file__), 'distutils.cfg'
)
if kind=='user':
dot = os.name=='posix' and '.' or ''
if kind == 'user':
dot = os.name == 'posix' and '.' or ''
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
raise ValueError(
"config_file() type must be 'local', 'global', or 'user'", kind
)
def edit_config(filename, settings, dry_run=False):
"""Edit a configuration file to include `settings`

@@ -35,6 +38,7 @@ def edit_config(filename, settings, dry_run=False):
A setting of ``None`` means to delete that setting.
"""
from setuptools.compat import ConfigParser
log.debug("Reading configuration from %s", filename)
opts = ConfigParser.RawConfigParser()
opts.read([filename])

@@ -46,39 +50,40 @@ def edit_config(filename, settings, dry_run=False):
if not opts.has_section(section):
log.debug("Adding new section [%s] to %s", section, filename)
opts.add_section(section)
for option,value in options.items():
for option, value in options.items():
if value is None:
log.debug(
"Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section,option)
opts.remove_option(section, option)
if not opts.options(section):
log.info("Deleting empty [%s] section from %s",
section, filename)
section, filename)
opts.remove_section(section)
else:
log.debug(
"Setting %s.%s to %r in %s",
section, option, value, filename
)
opts.set(section,option,value)
opts.set(section, option, value)
log.info("Writing %s", filename)
if not dry_run:
with open(filename, 'w') as f:
opts.write(f)
class option_base(Command):
"""Abstract base class for commands that mess with config files"""
user_options = [
('global-config', 'g',
"save options to the site-wide distutils.cfg file"),
"save options to the site-wide distutils.cfg file"),
('user-config', 'u',
"save options to the current user's pydistutils.cfg file"),
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
"configuration file to use (default=setup.cfg)"),
]
boolean_options = [

@@ -100,7 +105,7 @@ class option_base(Command):
filenames.append(self.filename)
if not filenames:
filenames.append(config_file('local'))
if len(filenames)>1:
if len(filenames) > 1:
raise DistutilsOptionError(
"Must specify only one configuration file option",
filenames

@@ -115,9 +120,9 @@ class setopt(option_base):
user_options = [
('command=', 'c', 'command to set an option for'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']

@@ -139,7 +144,7 @@ class setopt(option_base):
def run(self):
edit_config(
self.filename, {
self.command: {self.option.replace('-','_'):self.set_value}
self.command: {self.option.replace('-', '_'): self.set_value}
},
self.dry_run
)
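edit_config above is a thin wrapper around RawConfigParser: read the file, add or remove sections and options, then write it back. A minimal sketch of the same round trip, assuming the Python 3 module name (configparser) and using an invented scratch file and alias entry:

from configparser import RawConfigParser

filename = 'example-setup.cfg'      # scratch file, not a real project's setup.cfg
opts = RawConfigParser()
opts.read([filename])               # missing files are silently skipped

if not opts.has_section('alias'):
    opts.add_section('alias')
opts.set('alias', 'rebuild', 'clean build')   # invented alias

with open(filename, 'w') as f:      # same write path edit_config takes when dry_run is false
    opts.write(f)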
@@ -1,18 +1,17 @@
import unittest
from unittest import TestLoader
from setuptools import Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader
import unittest
import sys
from pkg_resources import (resource_listdir, resource_exists,
normalize_path, working_set, _namespace_packages, add_activation_listener,
require, EntryPoint)
from pkg_resources import (resource_listdir, resource_exists, normalize_path,
working_set, _namespace_packages,
add_activation_listener, require, EntryPoint)
from setuptools import Command
from setuptools.compat import PY3
from setuptools.py31compat import unittest_main
class ScanningLoader(TestLoader):
def loadTestsFromModule(self, module):
"""Return a suite of all tests cases contained in the given module

@@ -21,8 +20,7 @@ class ScanningLoader(TestLoader):
the return value to the tests.
"""
tests = []
if module.__name__ != 'setuptools.tests.doctest': # ugh
tests.append(TestLoader.loadTestsFromModule(self, module))
tests.append(TestLoader.loadTestsFromModule(self, module))
if hasattr(module, "additional_tests"):
tests.append(module.additional_tests())

@@ -33,7 +31,7 @@ class ScanningLoader(TestLoader):
submodule = module.__name__ + '.' + file[:-3]
else:
if resource_exists(module.__name__, file + '/__init__.py'):
submodule = module.__name__+'.'+file
submodule = module.__name__ + '.' + file
else:
continue
tests.append(self.loadTestsFromName(submodule))

@@ -41,19 +39,18 @@ class ScanningLoader(TestLoader):
if len(tests) != 1:
return self.suiteClass(tests)
else:
return tests[0] # don't create a nested suite for only one return
return tests[0] # don't create a nested suite for only one return
class test(Command):
"""Command to run unit tests after in-place build"""
description = "run unit tests after in-place build"
user_options = [
('test-module=','m', "Run 'test_suite' in specified module"),
('test-suite=','s',
"Test suite to run (e.g. 'some_module.test_suite')"),
('test-module=', 'm', "Run 'test_suite' in specified module"),
('test-suite=', 's',
"Test suite to run (e.g. 'some_module.test_suite')"),
('test-runner=', 'r', "Test runner to use"),
]

@@ -78,7 +75,7 @@ class test(Command):
self.test_args = [self.test_suite]
if self.verbose:
self.test_args.insert(0,'--verbose')
self.test_args.insert(0, '--verbose')
if self.test_loader is None:
self.test_loader = getattr(self.distribution, 'test_loader', None)
if self.test_loader is None:

@@ -87,10 +84,8 @@ class test(Command):
self.test_runner = getattr(self.distribution, 'test_runner', None)
def with_project_on_sys_path(self, func):
with_2to3 = (
sys.version_info >= (3,)
and getattr(self.distribution, 'use_2to3', False)
)
with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
if with_2to3:
# If we run 2to3 we can not do this inplace:

@@ -133,7 +128,8 @@ class test(Command):
def run(self):
if self.distribution.install_requires:
self.distribution.fetch_build_eggs(self.distribution.install_requires)
self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)

@@ -149,7 +145,7 @@ class test(Command):
# Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used
# with namespace packages.
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
if PY3 and getattr(self.distribution, 'use_2to3', False):
module = self.test_args[-1].split('.')[0]
if module in _namespace_packages:
del_modules = []

@@ -162,7 +158,7 @@ class test(Command):
list(map(sys.modules.__delitem__, del_modules))
unittest_main(
None, None, [unittest.__file__]+self.test_args,
None, None, [unittest.__file__] + self.test_args,
testLoader=self._resolve_as_ep(self.test_loader),
testRunner=self._resolve_as_ep(self.test_runner),
)
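Several edits above swap an inline sys.version_info check for setuptools.compat.PY3; judging from the lines it replaces, PY3 is assumed to reduce to the same comparison:

import sys

PY3 = sys.version_info >= (3,)      # assumed equivalent of setuptools.compat.PY3

use_2to3 = False                    # stand-in for getattr(self.distribution, 'use_2to3', False)
with_2to3 = PY3 and use_2to3
print(with_2to3)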
@@ -5,6 +5,10 @@ Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
import os
import socket
import zipfile

@@ -12,14 +16,9 @@ import tempfile
import sys
import shutil
from base64 import standard_b64encode
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
from pkg_resources import iter_entry_points
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
errors = 'surrogateescape' if PY3 else 'strict'

@@ -33,7 +32,6 @@ def b(s, encoding='utf-8'):
class upload_docs(upload):
description = 'Upload documentation to PyPI'
user_options = [

@@ -42,7 +40,7 @@ class upload_docs(upload):
('show-response', None,
'display full response text from server'),
('upload-dir=', None, 'directory to upload'),
]
]
boolean_options = upload.boolean_options
def has_sphinx(self):

@@ -159,7 +157,7 @@ class upload_docs(upload):
elif schema == 'https':
conn = httplib.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema "+schema)
raise AssertionError("unsupported schema " + schema)
data = ''
try:

@@ -190,4 +188,4 @@ class upload_docs(upload):
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print('-'*75, r.read(), '-'*75)
print('-' * 75, r.read(), '-' * 75)
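The schema dispatch above picks an HTTP or HTTPS connection class from setuptools.compat.httplib. A sketch of the same branch using the Python 3 stdlib modules those compat names are assumed to map to; the URL is only an example and no request is actually sent:

import http.client as httplib
from urllib.parse import urlparse

def make_connection(url):
    parts = urlparse(url)
    if parts.scheme == 'http':
        return httplib.HTTPConnection(parts.netloc)
    elif parts.scheme == 'https':
        return httplib.HTTPSConnection(parts.netloc)
    raise AssertionError("unsupported schema " + parts.scheme)

conn = make_connection('https://pypi.python.org/')   # connection object only; nothing is sent yet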