update Shared
This commit is contained in:
parent
e7ebbedd38
commit
6881f3471a
184 changed files with 13080 additions and 13691 deletions
|
|
@ -1,7 +1,7 @@
|
|||
"""Extensions to the 'distutils' for large or complex distributions"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import functools
|
||||
import distutils.core
|
||||
import distutils.filelist
|
||||
from distutils.core import Command as _Command
|
||||
|
|
@ -74,21 +74,24 @@ class PackageFinder(object):
|
|||
yield pkg
|
||||
|
||||
@staticmethod
|
||||
def _all_dirs(base_path):
|
||||
def _candidate_dirs(base_path):
|
||||
"""
|
||||
Return all dirs in base_path, relative to base_path
|
||||
Return all dirs in base_path that might be packages.
|
||||
"""
|
||||
has_dot = lambda name: '.' in name
|
||||
for root, dirs, files in os.walk(base_path, followlinks=True):
|
||||
# Exclude directories that contain a period, as they cannot be
|
||||
# packages. Mutate the list to avoid traversal.
|
||||
dirs[:] = filterfalse(has_dot, dirs)
|
||||
for dir in dirs:
|
||||
yield os.path.relpath(os.path.join(root, dir), base_path)
|
||||
|
||||
@classmethod
|
||||
def _find_packages_iter(cls, base_path):
|
||||
dirs = cls._all_dirs(base_path)
|
||||
suitable = filterfalse(lambda n: '.' in n, dirs)
|
||||
candidates = cls._candidate_dirs(base_path)
|
||||
return (
|
||||
path.replace(os.path.sep, '.')
|
||||
for path in suitable
|
||||
for path in candidates
|
||||
if cls._looks_like_package(os.path.join(base_path, path))
|
||||
)
|
||||
|
||||
|
|
@ -121,34 +124,45 @@ class Command(_Command):
|
|||
command_consumes_arguments = False
|
||||
|
||||
def __init__(self, dist, **kw):
|
||||
# Add support for keyword arguments
|
||||
_Command.__init__(self,dist)
|
||||
for k,v in kw.items():
|
||||
setattr(self,k,v)
|
||||
"""
|
||||
Construct the command for dist, updating
|
||||
vars(self) with any keyword parameters.
|
||||
"""
|
||||
_Command.__init__(self, dist)
|
||||
vars(self).update(kw)
|
||||
|
||||
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
|
||||
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
|
||||
for k,v in kw.items():
|
||||
setattr(cmd,k,v) # update command with keywords
|
||||
vars(cmd).update(kw)
|
||||
return cmd
|
||||
|
||||
distutils.core.Command = Command # we can't patch distutils.cmd, alas
|
||||
# we can't patch distutils.cmd, alas
|
||||
distutils.core.Command = Command
|
||||
|
||||
def findall(dir = os.curdir):
|
||||
"""Find all files under 'dir' and return the list of full filenames
|
||||
(relative to 'dir').
|
||||
|
||||
def _find_all_simple(path):
|
||||
"""
|
||||
all_files = []
|
||||
for base, dirs, files in os.walk(dir):
|
||||
if base==os.curdir or base.startswith(os.curdir+os.sep):
|
||||
base = base[2:]
|
||||
if base:
|
||||
files = [os.path.join(base, f) for f in files]
|
||||
all_files.extend(filter(os.path.isfile, files))
|
||||
return all_files
|
||||
Find all files under 'path'
|
||||
"""
|
||||
results = (
|
||||
os.path.join(base, file)
|
||||
for base, dirs, files in os.walk(path, followlinks=True)
|
||||
for file in files
|
||||
)
|
||||
return filter(os.path.isfile, results)
|
||||
|
||||
distutils.filelist.findall = findall # fix findall bug in distutils.
|
||||
|
||||
# sys.dont_write_bytecode was introduced in Python 2.6.
|
||||
_dont_write_bytecode = getattr(sys, 'dont_write_bytecode',
|
||||
bool(os.environ.get("PYTHONDONTWRITEBYTECODE")))
|
||||
def findall(dir=os.curdir):
|
||||
"""
|
||||
Find all files under 'dir' and return the list of full filenames.
|
||||
Unless dir is '.', return full filenames with dir prepended.
|
||||
"""
|
||||
files = _find_all_simple(dir)
|
||||
if dir == os.curdir:
|
||||
make_rel = functools.partial(os.path.relpath, start=dir)
|
||||
files = map(make_rel, files)
|
||||
return list(files)
|
||||
|
||||
|
||||
# fix findall bug in distutils (http://bugs.python.org/issue12885)
|
||||
distutils.filelist.findall = findall
|
||||
|
|
|
|||
|
|
@ -1,31 +0,0 @@
|
|||
# Copyright 2014 Donald Stufft
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
]
|
||||
|
||||
__title__ = "packaging"
|
||||
__summary__ = "Core utilities for Python packages"
|
||||
__uri__ = "https://github.com/pypa/packaging"
|
||||
|
||||
__version__ = "14.2"
|
||||
|
||||
__author__ = "Donald Stufft"
|
||||
__email__ = "donald@stufft.io"
|
||||
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__copyright__ = "Copyright 2014 %s" % __author__
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
# Copyright 2014 Donald Stufft
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from .__about__ import (
|
||||
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
||||
__uri__, __version__
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
]
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
# Copyright 2014 Donald Stufft
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
# flake8: noqa
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
else:
|
||||
string_types = basestring,
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
# Copyright 2014 Donald Stufft
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
class Infinity(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
return False
|
||||
|
||||
def __le__(self, other):
|
||||
return False
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
return True
|
||||
|
||||
def __ge__(self, other):
|
||||
return True
|
||||
|
||||
def __neg__(self):
|
||||
return NegativeInfinity
|
||||
|
||||
Infinity = Infinity()
|
||||
|
||||
|
||||
class NegativeInfinity(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "-Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
return True
|
||||
|
||||
def __le__(self, other):
|
||||
return True
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
return False
|
||||
|
||||
def __ge__(self, other):
|
||||
return False
|
||||
|
||||
def __neg__(self):
|
||||
return Infinity
|
||||
|
||||
NegativeInfinity = NegativeInfinity()
|
||||
|
|
@ -1,786 +0,0 @@
|
|||
# Copyright 2014 Donald Stufft
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
import re
|
||||
|
||||
from ._compat import string_types
|
||||
from ._structures import Infinity
|
||||
|
||||
|
||||
__all__ = [
|
||||
"parse", "Version", "LegacyVersion", "InvalidVersion", "Specifier",
|
||||
"InvalidSpecifier",
|
||||
]
|
||||
|
||||
|
||||
_Version = collections.namedtuple(
|
||||
"_Version",
|
||||
["epoch", "release", "dev", "pre", "post", "local"],
|
||||
)
|
||||
|
||||
|
||||
def parse(version):
|
||||
"""
|
||||
Parse the given version string and return either a :class:`Version` object
|
||||
or a :class:`LegacyVersion` object depending on if the given version is
|
||||
a valid PEP 440 version or a legacy version.
|
||||
"""
|
||||
try:
|
||||
return Version(version)
|
||||
except InvalidVersion:
|
||||
return LegacyVersion(version)
|
||||
|
||||
|
||||
class InvalidVersion(ValueError):
|
||||
"""
|
||||
An invalid version was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class _BaseVersion(object):
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._key)
|
||||
|
||||
def __lt__(self, other):
|
||||
return self._compare(other, lambda s, o: s < o)
|
||||
|
||||
def __le__(self, other):
|
||||
return self._compare(other, lambda s, o: s <= o)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._compare(other, lambda s, o: s == o)
|
||||
|
||||
def __ge__(self, other):
|
||||
return self._compare(other, lambda s, o: s >= o)
|
||||
|
||||
def __gt__(self, other):
|
||||
return self._compare(other, lambda s, o: s > o)
|
||||
|
||||
def __ne__(self, other):
|
||||
return self._compare(other, lambda s, o: s != o)
|
||||
|
||||
def _compare(self, other, method):
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return method(self._key, other._key)
|
||||
|
||||
|
||||
class LegacyVersion(_BaseVersion):
|
||||
|
||||
def __init__(self, version):
|
||||
self._version = str(version)
|
||||
self._key = _legacy_cmpkey(self._version)
|
||||
|
||||
def __str__(self):
|
||||
return self._version
|
||||
|
||||
def __repr__(self):
|
||||
return "<LegacyVersion({0})>".format(repr(str(self)))
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_prerelease(self):
|
||||
return False
|
||||
|
||||
|
||||
_legacy_version_component_re = re.compile(
|
||||
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
|
||||
)
|
||||
|
||||
_legacy_version_replacement_map = {
|
||||
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
|
||||
}
|
||||
|
||||
|
||||
def _parse_version_parts(s):
|
||||
for part in _legacy_version_component_re.split(s):
|
||||
part = _legacy_version_replacement_map.get(part, part)
|
||||
|
||||
if not part or part == ".":
|
||||
continue
|
||||
|
||||
if part[:1] in "0123456789":
|
||||
# pad for numeric comparison
|
||||
yield part.zfill(8)
|
||||
else:
|
||||
yield "*" + part
|
||||
|
||||
# ensure that alpha/beta/candidate are before final
|
||||
yield "*final"
|
||||
|
||||
|
||||
def _legacy_cmpkey(version):
|
||||
# We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
|
||||
# greater than or equal to 0. This will effectively put the LegacyVersion,
|
||||
# which uses the defacto standard originally implemented by setuptools,
|
||||
# as before all PEP 440 versions.
|
||||
epoch = -1
|
||||
|
||||
# This scheme is taken from pkg_resources.parse_version setuptools prior to
|
||||
# it's adoption of the packaging library.
|
||||
parts = []
|
||||
for part in _parse_version_parts(version.lower()):
|
||||
if part.startswith("*"):
|
||||
# remove "-" before a prerelease tag
|
||||
if part < "*final":
|
||||
while parts and parts[-1] == "*final-":
|
||||
parts.pop()
|
||||
|
||||
# remove trailing zeros from each series of numeric parts
|
||||
while parts and parts[-1] == "00000000":
|
||||
parts.pop()
|
||||
|
||||
parts.append(part)
|
||||
parts = tuple(parts)
|
||||
|
||||
return epoch, parts
|
||||
|
||||
|
||||
class Version(_BaseVersion):
|
||||
|
||||
_regex = re.compile(
|
||||
r"""
|
||||
^
|
||||
\s*
|
||||
v?
|
||||
(?:
|
||||
(?:(?P<epoch>[0-9]+)!)? # epoch
|
||||
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
||||
(?P<pre> # pre-release
|
||||
[-_\.]?
|
||||
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
|
||||
[-_\.]?
|
||||
(?P<pre_n>[0-9]+)?
|
||||
)?
|
||||
(?P<post> # post release
|
||||
(?:-(?P<post_n1>[0-9]+))
|
||||
|
|
||||
(?:
|
||||
[-_\.]?
|
||||
(?P<post_l>post|rev|r)
|
||||
[-_\.]?
|
||||
(?P<post_n2>[0-9]+)?
|
||||
)
|
||||
)?
|
||||
(?P<dev> # dev release
|
||||
[-_\.]?
|
||||
(?P<dev_l>dev)
|
||||
[-_\.]?
|
||||
(?P<dev_n>[0-9]+)?
|
||||
)?
|
||||
)
|
||||
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
||||
\s*
|
||||
$
|
||||
""",
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
)
|
||||
|
||||
def __init__(self, version):
|
||||
# Validate the version and parse it into pieces
|
||||
match = self._regex.search(version)
|
||||
if not match:
|
||||
raise InvalidVersion("Invalid version: '{0}'".format(version))
|
||||
|
||||
# Store the parsed out pieces of the version
|
||||
self._version = _Version(
|
||||
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
||||
release=tuple(int(i) for i in match.group("release").split(".")),
|
||||
pre=_parse_letter_version(
|
||||
match.group("pre_l"),
|
||||
match.group("pre_n"),
|
||||
),
|
||||
post=_parse_letter_version(
|
||||
match.group("post_l"),
|
||||
match.group("post_n1") or match.group("post_n2"),
|
||||
),
|
||||
dev=_parse_letter_version(
|
||||
match.group("dev_l"),
|
||||
match.group("dev_n"),
|
||||
),
|
||||
local=_parse_local_version(match.group("local")),
|
||||
)
|
||||
|
||||
# Generate a key which will be used for sorting
|
||||
self._key = _cmpkey(
|
||||
self._version.epoch,
|
||||
self._version.release,
|
||||
self._version.pre,
|
||||
self._version.post,
|
||||
self._version.dev,
|
||||
self._version.local,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<Version({0})>".format(repr(str(self)))
|
||||
|
||||
def __str__(self):
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self._version.epoch != 0:
|
||||
parts.append("{0}!".format(self._version.epoch))
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self._version.release))
|
||||
|
||||
# Pre-release
|
||||
if self._version.pre is not None:
|
||||
parts.append("".join(str(x) for x in self._version.pre))
|
||||
|
||||
# Post-release
|
||||
if self._version.post is not None:
|
||||
parts.append(".post{0}".format(self._version.post[1]))
|
||||
|
||||
# Development release
|
||||
if self._version.dev is not None:
|
||||
parts.append(".dev{0}".format(self._version.dev[1]))
|
||||
|
||||
# Local version segment
|
||||
if self._version.local is not None:
|
||||
parts.append(
|
||||
"+{0}".format(".".join(str(x) for x in self._version.local))
|
||||
)
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
return str(self).split("+", 1)[0]
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
version_string = str(self)
|
||||
if "+" in version_string:
|
||||
return version_string.split("+", 1)[1]
|
||||
|
||||
@property
|
||||
def is_prerelease(self):
|
||||
return bool(self._version.dev or self._version.pre)
|
||||
|
||||
|
||||
def _parse_letter_version(letter, number):
|
||||
if letter:
|
||||
# We consider there to be an implicit 0 in a pre-release if there is
|
||||
# not a numeral associated with it.
|
||||
if number is None:
|
||||
number = 0
|
||||
|
||||
# We normalize any letters to their lower case form
|
||||
letter = letter.lower()
|
||||
|
||||
# We consider some words to be alternate spellings of other words and
|
||||
# in those cases we want to normalize the spellings to our preferred
|
||||
# spelling.
|
||||
if letter == "alpha":
|
||||
letter = "a"
|
||||
elif letter == "beta":
|
||||
letter = "b"
|
||||
elif letter in ["rc", "pre", "preview"]:
|
||||
letter = "c"
|
||||
|
||||
return letter, int(number)
|
||||
if not letter and number:
|
||||
# We assume if we are given a number, but we are not given a letter
|
||||
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
||||
letter = "post"
|
||||
|
||||
return letter, int(number)
|
||||
|
||||
|
||||
_local_version_seperators = re.compile(r"[\._-]")
|
||||
|
||||
|
||||
def _parse_local_version(local):
|
||||
"""
|
||||
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
||||
"""
|
||||
if local is not None:
|
||||
return tuple(
|
||||
part.lower() if not part.isdigit() else int(part)
|
||||
for part in _local_version_seperators.split(local)
|
||||
)
|
||||
|
||||
|
||||
def _cmpkey(epoch, release, pre, post, dev, local):
|
||||
# When we compare a release version, we want to compare it with all of the
|
||||
# trailing zeros removed. So we'll use a reverse the list, drop all the now
|
||||
# leading zeros until we come to something non zero, then take the rest
|
||||
# re-reverse it back into the correct order and make it a tuple and use
|
||||
# that for our sorting key.
|
||||
release = tuple(
|
||||
reversed(list(
|
||||
itertools.dropwhile(
|
||||
lambda x: x == 0,
|
||||
reversed(release),
|
||||
)
|
||||
))
|
||||
)
|
||||
|
||||
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
|
||||
# We'll do this by abusing the pre segment, but we _only_ want to do this
|
||||
# if there is not a pre or a post segment. If we have one of those then
|
||||
# the normal sorting rules will handle this case correctly.
|
||||
if pre is None and post is None and dev is not None:
|
||||
pre = -Infinity
|
||||
# Versions without a pre-release (except as noted above) should sort after
|
||||
# those with one.
|
||||
elif pre is None:
|
||||
pre = Infinity
|
||||
|
||||
# Versions without a post segment should sort before those with one.
|
||||
if post is None:
|
||||
post = -Infinity
|
||||
|
||||
# Versions without a development segment should sort after those with one.
|
||||
if dev is None:
|
||||
dev = Infinity
|
||||
|
||||
if local is None:
|
||||
# Versions without a local segment should sort before those with one.
|
||||
local = -Infinity
|
||||
else:
|
||||
# Versions with a local segment need that segment parsed to implement
|
||||
# the sorting rules in PEP440.
|
||||
# - Alpha numeric segments sort before numeric segments
|
||||
# - Alpha numeric segments sort lexicographically
|
||||
# - Numeric segments sort numerically
|
||||
# - Shorter versions sort before longer versions when the prefixes
|
||||
# match exactly
|
||||
local = tuple(
|
||||
(i, "") if isinstance(i, int) else (-Infinity, i)
|
||||
for i in local
|
||||
)
|
||||
|
||||
return epoch, release, pre, post, dev, local
|
||||
|
||||
|
||||
class InvalidSpecifier(ValueError):
|
||||
"""
|
||||
An invalid specifier was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class Specifier(object):
|
||||
|
||||
_regex = re.compile(
|
||||
r"""
|
||||
^
|
||||
\s*
|
||||
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
||||
(?P<version>
|
||||
(?:
|
||||
# The identity operators allow for an escape hatch that will
|
||||
# do an exact string match of the version you wish to install.
|
||||
# This will not be parsed by PEP 440 and we cannot determine
|
||||
# any semantic meaning from it. This operator is discouraged
|
||||
# but included entirely as an escape hatch.
|
||||
(?<====) # Only match for the identity operator
|
||||
\s*
|
||||
[^\s]* # We just match everything, except for whitespace
|
||||
# since we are only testing for strict identity.
|
||||
)
|
||||
|
|
||||
(?:
|
||||
# The (non)equality operators allow for wild card and local
|
||||
# versions to be specified so we have to define these two
|
||||
# operators separately to enable that.
|
||||
(?<===|!=) # Only match for equals and not equals
|
||||
|
||||
\s*
|
||||
v?
|
||||
(?:[0-9]+!)? # epoch
|
||||
[0-9]+(?:\.[0-9]+)* # release
|
||||
(?: # pre release
|
||||
[-_\.]?
|
||||
(a|b|c|rc|alpha|beta|pre|preview)
|
||||
[-_\.]?
|
||||
[0-9]*
|
||||
)?
|
||||
(?: # post release
|
||||
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
||||
)?
|
||||
|
||||
# You cannot use a wild card and a dev or local version
|
||||
# together so group them with a | and make them optional.
|
||||
(?:
|
||||
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
||||
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
||||
|
|
||||
\.\* # Wild card syntax of .*
|
||||
)?
|
||||
)
|
||||
|
|
||||
(?:
|
||||
# The compatible operator requires at least two digits in the
|
||||
# release segment.
|
||||
(?<=~=) # Only match for the compatible operator
|
||||
|
||||
\s*
|
||||
v?
|
||||
(?:[0-9]+!)? # epoch
|
||||
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
||||
(?: # pre release
|
||||
[-_\.]?
|
||||
(a|b|c|rc|alpha|beta|pre|preview)
|
||||
[-_\.]?
|
||||
[0-9]*
|
||||
)?
|
||||
(?: # post release
|
||||
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
||||
)?
|
||||
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
||||
)
|
||||
|
|
||||
(?:
|
||||
# All other operators only allow a sub set of what the
|
||||
# (non)equality operators do. Specifically they do not allow
|
||||
# local versions to be specified nor do they allow the prefix
|
||||
# matching wild cards.
|
||||
(?<!==|!=|~=) # We have special cases for these
|
||||
# operators so we want to make sure they
|
||||
# don't match here.
|
||||
|
||||
\s*
|
||||
v?
|
||||
(?:[0-9]+!)? # epoch
|
||||
[0-9]+(?:\.[0-9]+)* # release
|
||||
(?: # pre release
|
||||
[-_\.]?
|
||||
(a|b|c|rc|alpha|beta|pre|preview)
|
||||
[-_\.]?
|
||||
[0-9]*
|
||||
)?
|
||||
(?: # post release
|
||||
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
||||
)?
|
||||
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
||||
)
|
||||
)
|
||||
\s*
|
||||
$
|
||||
""",
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
)
|
||||
|
||||
_operators = {
|
||||
"~=": "compatible",
|
||||
"==": "equal",
|
||||
"!=": "not_equal",
|
||||
"<=": "less_than_equal",
|
||||
">=": "greater_than_equal",
|
||||
"<": "less_than",
|
||||
">": "greater_than",
|
||||
"===": "arbitrary",
|
||||
}
|
||||
|
||||
def __init__(self, specs="", prereleases=None):
|
||||
# Split on comma to get each individual specification
|
||||
_specs = set()
|
||||
for spec in (s for s in specs.split(",") if s):
|
||||
match = self._regex.search(spec)
|
||||
if not match:
|
||||
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
|
||||
|
||||
_specs.add(
|
||||
(
|
||||
match.group("operator").strip(),
|
||||
match.group("version").strip(),
|
||||
)
|
||||
)
|
||||
|
||||
# Set a frozen set for our specifications
|
||||
self._specs = frozenset(_specs)
|
||||
|
||||
# Store whether or not this Specifier should accept prereleases
|
||||
self._prereleases = prereleases
|
||||
|
||||
def __repr__(self):
|
||||
return "<Specifier({0})>".format(repr(str(self)))
|
||||
|
||||
def __str__(self):
|
||||
return ",".join(["".join(s) for s in sorted(self._specs)])
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._specs)
|
||||
|
||||
def __and__(self, other):
|
||||
if isinstance(other, string_types):
|
||||
other = Specifier(other)
|
||||
elif not isinstance(other, Specifier):
|
||||
return NotImplemented
|
||||
|
||||
return self.__class__(",".join([str(self), str(other)]))
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, string_types):
|
||||
other = Specifier(other)
|
||||
elif not isinstance(other, Specifier):
|
||||
return NotImplemented
|
||||
|
||||
return self._specs == other._specs
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, string_types):
|
||||
other = Specifier(other)
|
||||
elif not isinstance(other, Specifier):
|
||||
return NotImplemented
|
||||
|
||||
return self._specs != other._specs
|
||||
|
||||
def _get_operator(self, op):
|
||||
return getattr(self, "_compare_{0}".format(self._operators[op]))
|
||||
|
||||
def _compare_compatible(self, prospective, spec):
|
||||
# Compatible releases have an equivalent combination of >= and ==. That
|
||||
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
||||
# implement this in terms of the other specifiers instead of
|
||||
# implementing it ourselves. The only thing we need to do is construct
|
||||
# the other specifiers.
|
||||
|
||||
# We want everything but the last item in the version, but we want to
|
||||
# ignore post and dev releases and we want to treat the pre-release as
|
||||
# it's own separate segment.
|
||||
prefix = ".".join(
|
||||
list(
|
||||
itertools.takewhile(
|
||||
lambda x: (not x.startswith("post")
|
||||
and not x.startswith("dev")),
|
||||
_version_split(spec),
|
||||
)
|
||||
)[:-1]
|
||||
)
|
||||
|
||||
# Add the prefix notation to the end of our string
|
||||
prefix += ".*"
|
||||
|
||||
return (self._get_operator(">=")(prospective, spec)
|
||||
and self._get_operator("==")(prospective, prefix))
|
||||
|
||||
def _compare_equal(self, prospective, spec):
|
||||
# We need special logic to handle prefix matching
|
||||
if spec.endswith(".*"):
|
||||
# Split the spec out by dots, and pretend that there is an implicit
|
||||
# dot in between a release segment and a pre-release segment.
|
||||
spec = _version_split(spec[:-2]) # Remove the trailing .*
|
||||
|
||||
# Split the prospective version out by dots, and pretend that there
|
||||
# is an implicit dot in between a release segment and a pre-release
|
||||
# segment.
|
||||
prospective = _version_split(str(prospective))
|
||||
|
||||
# Shorten the prospective version to be the same length as the spec
|
||||
# so that we can determine if the specifier is a prefix of the
|
||||
# prospective version or not.
|
||||
prospective = prospective[:len(spec)]
|
||||
|
||||
# Pad out our two sides with zeros so that they both equal the same
|
||||
# length.
|
||||
spec, prospective = _pad_version(spec, prospective)
|
||||
else:
|
||||
# Convert our spec string into a Version
|
||||
spec = Version(spec)
|
||||
|
||||
# If the specifier does not have a local segment, then we want to
|
||||
# act as if the prospective version also does not have a local
|
||||
# segment.
|
||||
if not spec.local:
|
||||
prospective = Version(prospective.public)
|
||||
|
||||
return prospective == spec
|
||||
|
||||
def _compare_not_equal(self, prospective, spec):
|
||||
return not self._compare_equal(prospective, spec)
|
||||
|
||||
def _compare_less_than_equal(self, prospective, spec):
|
||||
return prospective <= Version(spec)
|
||||
|
||||
def _compare_greater_than_equal(self, prospective, spec):
|
||||
return prospective >= Version(spec)
|
||||
|
||||
def _compare_less_than(self, prospective, spec):
|
||||
# Less than are defined as exclusive operators, this implies that
|
||||
# pre-releases do not match for the same series as the spec. This is
|
||||
# implemented by making <V imply !=V.*.
|
||||
return (prospective < Version(spec)
|
||||
and self._get_operator("!=")(prospective, spec + ".*"))
|
||||
|
||||
def _compare_greater_than(self, prospective, spec):
|
||||
# Greater than are defined as exclusive operators, this implies that
|
||||
# pre-releases do not match for the same series as the spec. This is
|
||||
# implemented by making >V imply !=V.*.
|
||||
return (prospective > Version(spec)
|
||||
and self._get_operator("!=")(prospective, spec + ".*"))
|
||||
|
||||
def _compare_arbitrary(self, prospective, spec):
|
||||
return str(prospective).lower() == str(spec).lower()
|
||||
|
||||
@property
|
||||
def prereleases(self):
|
||||
# If there is an explicit prereleases set for this, then we'll just
|
||||
# blindly use that.
|
||||
if self._prereleases is not None:
|
||||
return self._prereleases
|
||||
|
||||
# Look at all of our specifiers and determine if they are inclusive
|
||||
# operators, and if they are if they are including an explicit
|
||||
# prerelease.
|
||||
for spec, version in self._specs:
|
||||
if spec in ["==", ">=", "<=", "~="]:
|
||||
# The == specifier can include a trailing .*, if it does we
|
||||
# want to remove before parsing.
|
||||
if spec == "==" and version.endswith(".*"):
|
||||
version = version[:-2]
|
||||
|
||||
# Parse the version, and if it is a pre-release than this
|
||||
# specifier allows pre-releases.
|
||||
if parse(version).is_prerelease:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@prereleases.setter
|
||||
def prereleases(self, value):
|
||||
self._prereleases = value
|
||||
|
||||
def contains(self, item, prereleases=None):
|
||||
# Determine if prereleases are to be allowed or not.
|
||||
if prereleases is None:
|
||||
prereleases = self.prereleases
|
||||
|
||||
# Normalize item to a Version or LegacyVersion, this allows us to have
|
||||
# a shortcut for ``"2.0" in Specifier(">=2")
|
||||
if isinstance(item, (Version, LegacyVersion)):
|
||||
version_item = item
|
||||
else:
|
||||
try:
|
||||
version_item = Version(item)
|
||||
except ValueError:
|
||||
version_item = LegacyVersion(item)
|
||||
|
||||
# Determine if we should be supporting prereleases in this specifier
|
||||
# or not, if we do not support prereleases than we can short circuit
|
||||
# logic if this version is a prereleases.
|
||||
if version_item.is_prerelease and not prereleases:
|
||||
return False
|
||||
|
||||
# Detect if we have any specifiers, if we do not then anything matches
|
||||
# and we can short circuit all this logic.
|
||||
if not self._specs:
|
||||
return True
|
||||
|
||||
# If we're operating on a LegacyVersion, then we can only support
|
||||
# arbitrary comparison so do a quick check to see if the spec contains
|
||||
# any non arbitrary specifiers
|
||||
if isinstance(version_item, LegacyVersion):
|
||||
if any(op != "===" for op, _ in self._specs):
|
||||
return False
|
||||
|
||||
# Ensure that the passed in version matches all of our version
|
||||
# specifiers
|
||||
return all(
|
||||
self._get_operator(op)(
|
||||
version_item if op != "===" else item,
|
||||
spec,
|
||||
)
|
||||
for op, spec, in self._specs
|
||||
)
|
||||
|
||||
def filter(self, iterable, prereleases=None):
|
||||
iterable = list(iterable)
|
||||
yielded = False
|
||||
found_prereleases = []
|
||||
|
||||
kw = {"prereleases": prereleases if prereleases is not None else True}
|
||||
|
||||
# Attempt to iterate over all the values in the iterable and if any of
|
||||
# them match, yield them.
|
||||
for version in iterable:
|
||||
if not isinstance(version, (Version, LegacyVersion)):
|
||||
parsed_version = parse(version)
|
||||
else:
|
||||
parsed_version = version
|
||||
|
||||
if self.contains(parsed_version, **kw):
|
||||
# If our version is a prerelease, and we were not set to allow
|
||||
# prereleases, then we'll store it for later incase nothing
|
||||
# else matches this specifier.
|
||||
if (parsed_version.is_prerelease
|
||||
and not (prereleases or self.prereleases)):
|
||||
found_prereleases.append(version)
|
||||
# Either this is not a prerelease, or we should have been
|
||||
# accepting prereleases from the begining.
|
||||
else:
|
||||
yielded = True
|
||||
yield version
|
||||
|
||||
# Now that we've iterated over everything, determine if we've yielded
|
||||
# any values, and if we have not and we have any prereleases stored up
|
||||
# then we will go ahead and yield the prereleases.
|
||||
if not yielded and found_prereleases:
|
||||
for version in found_prereleases:
|
||||
yield version
|
||||
|
||||
|
||||
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
||||
|
||||
|
||||
def _version_split(version):
|
||||
result = []
|
||||
for item in version.split("."):
|
||||
match = _prefix_regex.search(item)
|
||||
if match:
|
||||
result.extend(match.groups())
|
||||
else:
|
||||
result.append(item)
|
||||
return result
|
||||
|
||||
|
||||
def _pad_version(left, right):
|
||||
left_split, right_split = [], []
|
||||
|
||||
# Get the release segment of our versions
|
||||
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
||||
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
||||
|
||||
# Get the rest of our versions
|
||||
left_split.append(left[len(left_split):])
|
||||
right_split.append(left[len(right_split):])
|
||||
|
||||
# Insert our padding
|
||||
left_split.insert(
|
||||
1,
|
||||
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
|
||||
)
|
||||
right_split.insert(
|
||||
1,
|
||||
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
|
||||
)
|
||||
|
||||
return (
|
||||
list(itertools.chain(*left_split)),
|
||||
list(itertools.chain(*right_split)),
|
||||
)
|
||||
|
|
@ -64,20 +64,23 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter):
|
|||
Raises ``UnrecognizedFormat`` if `filename` is not a directory
|
||||
"""
|
||||
if not os.path.isdir(filename):
|
||||
raise UnrecognizedFormat("%s is not a directory" % (filename,))
|
||||
raise UnrecognizedFormat("%s is not a directory" % filename)
|
||||
|
||||
paths = {filename:('',extract_dir)}
|
||||
paths = {
|
||||
filename: ('', extract_dir),
|
||||
}
|
||||
for base, dirs, files in os.walk(filename):
|
||||
src,dst = paths[base]
|
||||
src, dst = paths[base]
|
||||
for d in dirs:
|
||||
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
|
||||
paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
|
||||
for f in files:
|
||||
target = os.path.join(dst,f)
|
||||
target = progress_filter(src+f, target)
|
||||
target = os.path.join(dst, f)
|
||||
target = progress_filter(src + f, target)
|
||||
if not target:
|
||||
continue # skip non-files
|
||||
# skip non-files
|
||||
continue
|
||||
ensure_directory(target)
|
||||
f = os.path.join(base,f)
|
||||
f = os.path.join(base, f)
|
||||
shutil.copyfile(f, target)
|
||||
shutil.copystat(f, target)
|
||||
|
||||
|
|
@ -112,12 +115,8 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
|
|||
# file
|
||||
ensure_directory(target)
|
||||
data = z.read(info.filename)
|
||||
f = open(target,'wb')
|
||||
try:
|
||||
with open(target, 'wb') as f:
|
||||
f.write(data)
|
||||
finally:
|
||||
f.close()
|
||||
del data
|
||||
unix_attributes = info.external_attr >> 16
|
||||
if unix_attributes:
|
||||
os.chmod(target, unix_attributes)
|
||||
|
|
@ -137,18 +136,21 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
|
|||
"%s is not a compressed or uncompressed tar file" % (filename,)
|
||||
)
|
||||
with contextlib.closing(tarobj):
|
||||
tarobj.chown = lambda *args: None # don't do any chowning!
|
||||
# don't do any chowning!
|
||||
tarobj.chown = lambda *args: None
|
||||
for member in tarobj:
|
||||
name = member.name
|
||||
# don't extract absolute paths or ones with .. in them
|
||||
if not name.startswith('/') and '..' not in name.split('/'):
|
||||
prelim_dst = os.path.join(extract_dir, *name.split('/'))
|
||||
|
||||
# resolve any links and to extract the link targets as normal files
|
||||
# resolve any links and to extract the link targets as normal
|
||||
# files
|
||||
while member is not None and (member.islnk() or member.issym()):
|
||||
linkpath = member.linkname
|
||||
if member.issym():
|
||||
linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
|
||||
base = posixpath.dirname(member.name)
|
||||
linkpath = posixpath.join(base, linkpath)
|
||||
linkpath = posixpath.normpath(linkpath)
|
||||
member = tarobj._getmember(linkpath)
|
||||
|
||||
|
|
@ -158,9 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
|
|||
if final_dst.endswith(os.sep):
|
||||
final_dst = final_dst[:-1]
|
||||
try:
|
||||
tarobj._extract_member(member, final_dst) # XXX Ugh
|
||||
# XXX Ugh
|
||||
tarobj._extract_member(member, final_dst)
|
||||
except tarfile.ExtractError:
|
||||
pass # chown/chmod/mkfifo/mknode/makedev failed
|
||||
# chown/chmod/mkfifo/mknode/makedev failed
|
||||
pass
|
||||
return True
|
||||
|
||||
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
|
||||
|
|
|
|||
0
Shared/lib/python3.4/site-packages/setuptools/cli-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli-64.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli-64.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli-arm-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli-arm-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/cli.exe
Executable file → Normal file
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
Build .egg distributions"""
|
||||
|
||||
# This module should be kept compatible with Python 2.3
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils.dir_util import remove_tree, mkpath
|
||||
from distutils import log
|
||||
|
|
@ -406,10 +405,6 @@ def scan_module(egg_dir, base, name, stubs):
|
|||
if bad in symbols:
|
||||
log.warn("%s: module MAY be using inspect.%s", module, bad)
|
||||
safe = False
|
||||
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
|
||||
if sys.version[:3] == "2.4": # -m works w/zipfiles in 2.5
|
||||
log.warn("%s: top-level module may be 'python -m' script", module)
|
||||
safe = False
|
||||
return safe
|
||||
|
||||
|
||||
|
|
@ -441,7 +436,7 @@ INSTALL_DIRECTORY_ATTRS = [
|
|||
]
|
||||
|
||||
|
||||
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
|
||||
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
|
||||
mode='w'):
|
||||
"""Create a zip file from all the files under 'base_dir'. The output
|
||||
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
|
||||
|
|
@ -463,11 +458,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
|
|||
z.write(path, p)
|
||||
log.debug("adding '%s'" % p)
|
||||
|
||||
if compress is None:
|
||||
# avoid 2.3 zipimport bug when 64 bits
|
||||
compress = (sys.version >= "2.4")
|
||||
|
||||
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
|
||||
compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
|
||||
if not dry_run:
|
||||
z = zipfile.ZipFile(zip_filename, mode, compression=compression)
|
||||
for dirname, dirs, files in os.walk(base_dir):
|
||||
|
|
|
|||
|
|
@ -6,12 +6,13 @@ from distutils.errors import DistutilsError
|
|||
from distutils import log
|
||||
import os
|
||||
import sys
|
||||
import itertools
|
||||
|
||||
from setuptools.extension import Library
|
||||
|
||||
try:
|
||||
# Attempt to use Pyrex for building extensions, if available
|
||||
from Pyrex.Distutils.build_ext import build_ext as _build_ext
|
||||
# Attempt to use Cython for building extensions, if available
|
||||
from Cython.Distutils.build_ext import build_ext as _build_ext
|
||||
except ImportError:
|
||||
_build_ext = _du_build_ext
|
||||
|
||||
|
|
@ -33,19 +34,13 @@ if sys.platform == "darwin":
|
|||
use_stubs = True
|
||||
elif os.name != 'nt':
|
||||
try:
|
||||
from dl import RTLD_NOW
|
||||
|
||||
have_rtld = True
|
||||
use_stubs = True
|
||||
import dl
|
||||
use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def if_dl(s):
|
||||
if have_rtld:
|
||||
return s
|
||||
return ''
|
||||
|
||||
if_dl = lambda s: s if have_rtld else ''
|
||||
|
||||
class build_ext(_build_ext):
|
||||
def run(self):
|
||||
|
|
@ -78,15 +73,6 @@ class build_ext(_build_ext):
|
|||
if ext._needs_stub:
|
||||
self.write_stub(package_dir or os.curdir, ext, True)
|
||||
|
||||
if _build_ext is not _du_build_ext and not hasattr(_build_ext,
|
||||
'pyrex_sources'):
|
||||
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
|
||||
def swig_sources(self, sources, *otherargs):
|
||||
# first do any Pyrex processing
|
||||
sources = _build_ext.swig_sources(self, sources) or sources
|
||||
# Then do any actual SWIG stuff on the remainder
|
||||
return _du_build_ext.swig_sources(self, sources, *otherargs)
|
||||
|
||||
def get_ext_filename(self, fullname):
|
||||
filename = _build_ext.get_ext_filename(self, fullname)
|
||||
if fullname in self.ext_map:
|
||||
|
|
@ -123,10 +109,10 @@ class build_ext(_build_ext):
|
|||
# XXX what to do with conflicts?
|
||||
self.ext_map[fullname.split('.')[-1]] = ext
|
||||
|
||||
ltd = ext._links_to_dynamic = \
|
||||
self.shlibs and self.links_to_dynamic(ext) or False
|
||||
ext._needs_stub = ltd and use_stubs and not isinstance(ext,
|
||||
Library)
|
||||
ltd = self.shlibs and self.links_to_dynamic(ext) or False
|
||||
ns = ltd and use_stubs and not isinstance(ext, Library)
|
||||
ext._links_to_dynamic = ltd
|
||||
ext._needs_stub = ns
|
||||
filename = ext._file_name = self.get_ext_filename(fullname)
|
||||
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
|
||||
if ltd and libdir not in ext.library_dirs:
|
||||
|
|
@ -180,15 +166,15 @@ class build_ext(_build_ext):
|
|||
return _build_ext.get_export_symbols(self, ext)
|
||||
|
||||
def build_extension(self, ext):
|
||||
ext._convert_pyx_sources_to_lang()
|
||||
_compiler = self.compiler
|
||||
try:
|
||||
if isinstance(ext, Library):
|
||||
self.compiler = self.shlib_compiler
|
||||
_build_ext.build_extension(self, ext)
|
||||
if ext._needs_stub:
|
||||
self.write_stub(
|
||||
self.get_finalized_command('build_py').build_lib, ext
|
||||
)
|
||||
cmd = self.get_finalized_command('build_py').build_lib
|
||||
self.write_stub(cmd, ext)
|
||||
finally:
|
||||
self.compiler = _compiler
|
||||
|
||||
|
|
@ -199,22 +185,27 @@ class build_ext(_build_ext):
|
|||
# XXX static-compiled version
|
||||
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
|
||||
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
|
||||
for libname in ext.libraries:
|
||||
if pkg + libname in libnames:
|
||||
return True
|
||||
return False
|
||||
return any(pkg + libname in libnames for libname in ext.libraries)
|
||||
|
||||
def get_outputs(self):
|
||||
outputs = _build_ext.get_outputs(self)
|
||||
optimize = self.get_finalized_command('build_py').optimize
|
||||
for ext in self.extensions:
|
||||
if ext._needs_stub:
|
||||
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
|
||||
outputs.append(base + '.py')
|
||||
outputs.append(base + '.pyc')
|
||||
if optimize:
|
||||
outputs.append(base + '.pyo')
|
||||
return outputs
|
||||
return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
|
||||
|
||||
def __get_stubs_outputs(self):
|
||||
# assemble the base name for each extension that needs a stub
|
||||
ns_ext_bases = (
|
||||
os.path.join(self.build_lib, *ext._full_name.split('.'))
|
||||
for ext in self.extensions
|
||||
if ext._needs_stub
|
||||
)
|
||||
# pair each base with the extension
|
||||
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
|
||||
return list(base + fnext for base, fnext in pairs)
|
||||
|
||||
def __get_output_extensions(self):
|
||||
yield '.py'
|
||||
yield '.pyc'
|
||||
if self.get_finalized_command('build_py').optimize:
|
||||
yield '.pyo'
|
||||
|
||||
def write_stub(self, output_dir, ext, compile=False):
|
||||
log.info("writing stub loader for %s to %s", ext._full_name,
|
||||
|
|
|
|||
|
|
@ -136,22 +136,7 @@ class build_py(orig.build_py, Mixin2to3):
|
|||
mf.setdefault(src_dirs[d], []).append(path)
|
||||
|
||||
def get_data_files(self):
|
||||
pass # kludge 2.4 for lazy computation
|
||||
|
||||
if sys.version < "2.4": # Python 2.4 already has this code
|
||||
def get_outputs(self, include_bytecode=1):
|
||||
"""Return complete list of files copied to the build directory
|
||||
|
||||
This includes both '.py' files and data files, as well as '.pyc'
|
||||
and '.pyo' files if 'include_bytecode' is true. (This method is
|
||||
needed for the 'install_lib' command to do its job properly, and to
|
||||
generate a correct installation manifest.)
|
||||
"""
|
||||
return orig.build_py.get_outputs(self, include_bytecode) + [
|
||||
os.path.join(build_dir, filename)
|
||||
for package, src_dir, build_dir, filenames in self.data_files
|
||||
for filename in filenames
|
||||
]
|
||||
pass # Lazily compute data files in _get_data_files() function.
|
||||
|
||||
def check_package(self, package, package_dir):
|
||||
"""Check namespace packages' __init__ for declare_namespace"""
|
||||
|
|
|
|||
|
|
@ -34,8 +34,12 @@ import textwrap
|
|||
import warnings
|
||||
import site
|
||||
import struct
|
||||
import contextlib
|
||||
import subprocess
|
||||
import shlex
|
||||
import io
|
||||
|
||||
from setuptools import Command, _dont_write_bytecode
|
||||
from setuptools import Command
|
||||
from setuptools.sandbox import run_setup
|
||||
from setuptools.py31compat import get_path, get_config_vars
|
||||
from setuptools.command import setopt
|
||||
|
|
@ -53,9 +57,9 @@ from pkg_resources import (
|
|||
)
|
||||
import pkg_resources
|
||||
|
||||
# Turn on PEP440Warnings
|
||||
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
|
||||
|
||||
sys_executable = os.environ.get('__PYVENV_LAUNCHER__',
|
||||
os.path.normpath(sys.executable))
|
||||
|
||||
__all__ = [
|
||||
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
|
||||
|
|
@ -148,12 +152,9 @@ class easy_install(Command):
|
|||
create_index = PackageIndex
|
||||
|
||||
def initialize_options(self):
|
||||
if site.ENABLE_USER_SITE:
|
||||
whereami = os.path.abspath(__file__)
|
||||
self.user = whereami.startswith(site.USER_SITE)
|
||||
else:
|
||||
self.user = 0
|
||||
|
||||
# the --user option seems to be an opt-in one,
|
||||
# so the default should be False.
|
||||
self.user = 0
|
||||
self.zip_ok = self.local_snapshots_ok = None
|
||||
self.install_dir = self.script_dir = self.exclude_scripts = None
|
||||
self.index_url = None
|
||||
|
|
@ -199,20 +200,34 @@ class easy_install(Command):
|
|||
)
|
||||
|
||||
def delete_blockers(self, blockers):
|
||||
for filename in blockers:
|
||||
if os.path.exists(filename) or os.path.islink(filename):
|
||||
log.info("Deleting %s", filename)
|
||||
if not self.dry_run:
|
||||
if (os.path.isdir(filename) and
|
||||
not os.path.islink(filename)):
|
||||
rmtree(filename)
|
||||
else:
|
||||
os.unlink(filename)
|
||||
extant_blockers = (
|
||||
filename for filename in blockers
|
||||
if os.path.exists(filename) or os.path.islink(filename)
|
||||
)
|
||||
list(map(self._delete_path, extant_blockers))
|
||||
|
||||
def _delete_path(self, path):
|
||||
log.info("Deleting %s", path)
|
||||
if self.dry_run:
|
||||
return
|
||||
|
||||
is_tree = os.path.isdir(path) and not os.path.islink(path)
|
||||
remover = rmtree if is_tree else os.unlink
|
||||
remover(path)
|
||||
|
||||
@staticmethod
|
||||
def _render_version():
|
||||
"""
|
||||
Render the Setuptools version and installation details, then exit.
|
||||
"""
|
||||
ver = sys.version[:3]
|
||||
dist = get_distribution('setuptools')
|
||||
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
|
||||
print(tmpl.format(**locals()))
|
||||
raise SystemExit()
|
||||
|
||||
def finalize_options(self):
|
||||
if self.version:
|
||||
print('setuptools %s' % get_distribution('setuptools').version)
|
||||
sys.exit()
|
||||
self.version and self._render_version()
|
||||
|
||||
py_version = sys.version.split()[0]
|
||||
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
|
||||
|
|
@ -236,18 +251,7 @@ class easy_install(Command):
|
|||
self.config_vars['userbase'] = self.install_userbase
|
||||
self.config_vars['usersite'] = self.install_usersite
|
||||
|
||||
# fix the install_dir if "--user" was used
|
||||
# XXX: duplicate of the code in the setup command
|
||||
if self.user and site.ENABLE_USER_SITE:
|
||||
self.create_home_path()
|
||||
if self.install_userbase is None:
|
||||
raise DistutilsPlatformError(
|
||||
"User base directory is not specified")
|
||||
self.install_base = self.install_platbase = self.install_userbase
|
||||
if os.name == 'posix':
|
||||
self.select_scheme("unix_user")
|
||||
else:
|
||||
self.select_scheme(os.name + "_user")
|
||||
self._fix_install_dir_for_user_site()
|
||||
|
||||
self.expand_basedirs()
|
||||
self.expand_dirs()
|
||||
|
|
@ -342,6 +346,21 @@ class easy_install(Command):
|
|||
|
||||
self.outputs = []
|
||||
|
||||
def _fix_install_dir_for_user_site(self):
|
||||
"""
|
||||
Fix the install_dir if "--user" was used.
|
||||
"""
|
||||
if not self.user or not site.ENABLE_USER_SITE:
|
||||
return
|
||||
|
||||
self.create_home_path()
|
||||
if self.install_userbase is None:
|
||||
msg = "User base directory is not specified"
|
||||
raise DistutilsPlatformError(msg)
|
||||
self.install_base = self.install_platbase = self.install_userbase
|
||||
scheme_name = os.name.replace('posix', 'unix') + '_user'
|
||||
self.select_scheme(scheme_name)
|
||||
|
||||
def _expand_attrs(self, attrs):
|
||||
for attr in attrs:
|
||||
val = getattr(self, attr)
|
||||
|
|
@ -434,7 +453,7 @@ class easy_install(Command):
|
|||
self.pth_file = None
|
||||
|
||||
PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
|
||||
if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]):
|
||||
if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
|
||||
# only PYTHONPATH dirs need a site.py, so pretend it's there
|
||||
self.sitepy_installed = True
|
||||
elif self.multi_version and not os.path.exists(pth_file):
|
||||
|
|
@ -442,43 +461,49 @@ class easy_install(Command):
|
|||
self.pth_file = None # and don't create a .pth file
|
||||
self.install_dir = instdir
|
||||
|
||||
__cant_write_msg = textwrap.dedent("""
|
||||
can't create or remove files in install directory
|
||||
|
||||
The following error occurred while trying to add or remove files in the
|
||||
installation directory:
|
||||
|
||||
%s
|
||||
|
||||
The installation directory you specified (via --install-dir, --prefix, or
|
||||
the distutils default setting) was:
|
||||
|
||||
%s
|
||||
""").lstrip()
|
||||
|
||||
__not_exists_id = textwrap.dedent("""
|
||||
This directory does not currently exist. Please create it and try again, or
|
||||
choose a different installation directory (using the -d or --install-dir
|
||||
option).
|
||||
""").lstrip()
|
||||
|
||||
__access_msg = textwrap.dedent("""
|
||||
Perhaps your account does not have write access to this directory? If the
|
||||
installation directory is a system-owned directory, you may need to sign in
|
||||
as the administrator or "root" account. If you do not have administrative
|
||||
access to this machine, you may wish to choose a different installation
|
||||
directory, preferably one that is listed in your PYTHONPATH environment
|
||||
variable.
|
||||
|
||||
For information on other options, you may wish to consult the
|
||||
documentation at:
|
||||
|
||||
https://pythonhosted.org/setuptools/easy_install.html
|
||||
|
||||
Please make the appropriate changes for your system and try again.
|
||||
""").lstrip()
|
||||
|
||||
def cant_write_to_target(self):
|
||||
template = """can't create or remove files in install directory
|
||||
|
||||
The following error occurred while trying to add or remove files in the
|
||||
installation directory:
|
||||
|
||||
%s
|
||||
|
||||
The installation directory you specified (via --install-dir, --prefix, or
|
||||
the distutils default setting) was:
|
||||
|
||||
%s
|
||||
"""
|
||||
msg = template % (sys.exc_info()[1], self.install_dir,)
|
||||
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
|
||||
|
||||
if not os.path.exists(self.install_dir):
|
||||
msg += """
|
||||
This directory does not currently exist. Please create it and try again, or
|
||||
choose a different installation directory (using the -d or --install-dir
|
||||
option).
|
||||
"""
|
||||
msg += '\n' + self.__not_exists_id
|
||||
else:
|
||||
msg += """
|
||||
Perhaps your account does not have write access to this directory? If the
|
||||
installation directory is a system-owned directory, you may need to sign in
|
||||
as the administrator or "root" account. If you do not have administrative
|
||||
access to this machine, you may wish to choose a different installation
|
||||
directory, preferably one that is listed in your PYTHONPATH environment
|
||||
variable.
|
||||
|
||||
For information on other options, you may wish to consult the
|
||||
documentation at:
|
||||
|
||||
https://pythonhosted.org/setuptools/easy_install.html
|
||||
|
||||
Please make the appropriate changes for your system and try again.
|
||||
"""
|
||||
msg += '\n' + self.__access_msg
|
||||
raise DistutilsError(msg)
|
||||
|
||||
def check_pth_processing(self):
|
||||
|
|
@ -692,17 +717,10 @@ Please make the appropriate changes for your system and try again.
|
|||
distros = WorkingSet([]).resolve(
|
||||
[requirement], self.local_index, self.easy_install
|
||||
)
|
||||
except DistributionNotFound:
|
||||
e = sys.exc_info()[1]
|
||||
raise DistutilsError(
|
||||
"Could not find required distribution %s" % e.args
|
||||
)
|
||||
except VersionConflict:
|
||||
e = sys.exc_info()[1]
|
||||
raise DistutilsError(
|
||||
"Installed distribution %s conflicts with requirement %s"
|
||||
% e.args
|
||||
)
|
||||
except DistributionNotFound as e:
|
||||
raise DistutilsError(str(e))
|
||||
except VersionConflict as e:
|
||||
raise DistutilsError(e.report())
|
||||
if self.always_copy or self.always_copy_from:
|
||||
# Force all the relevant distros to be copied or activated
|
||||
for dist in distros:
|
||||
|
|
@ -743,7 +761,7 @@ Please make the appropriate changes for your system and try again.
|
|||
|
||||
def install_wrapper_scripts(self, dist):
|
||||
if not self.exclude_scripts:
|
||||
for args in get_script_args(dist):
|
||||
for args in ScriptWriter.best().get_args(dist):
|
||||
self.write_script(*args)
|
||||
|
||||
def install_script(self, dist, script_name, script_text, dev_path=None):
|
||||
|
|
@ -752,7 +770,7 @@ Please make the appropriate changes for your system and try again.
|
|||
is_script = is_python_script(script_text, script_name)
|
||||
|
||||
if is_script:
|
||||
script_text = (get_script_header(script_text) +
|
||||
script_text = (ScriptWriter.get_header(script_text) +
|
||||
self._load_template(dev_path) % locals())
|
||||
self.write_script(script_name, _to_ascii(script_text), 'b')
|
||||
|
||||
|
|
@ -916,9 +934,10 @@ Please make the appropriate changes for your system and try again.
|
|||
f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
|
||||
f.close()
|
||||
script_dir = os.path.join(_egg_info, 'scripts')
|
||||
self.delete_blockers( # delete entry-point scripts to avoid duping
|
||||
# delete entry-point scripts to avoid duping
|
||||
self.delete_blockers(
|
||||
[os.path.join(script_dir, args[0]) for args in
|
||||
get_script_args(dist)]
|
||||
ScriptWriter.get_args(dist)]
|
||||
)
|
||||
# Build .egg file from tmpdir
|
||||
bdist_egg.make_zipfile(
|
||||
|
|
@ -980,46 +999,52 @@ Please make the appropriate changes for your system and try again.
|
|||
f.write('\n'.join(locals()[name]) + '\n')
|
||||
f.close()
|
||||
|
||||
__mv_warning = textwrap.dedent("""
|
||||
Because this distribution was installed --multi-version, before you can
|
||||
import modules from this package in an application, you will need to
|
||||
'import pkg_resources' and then use a 'require()' call similar to one of
|
||||
these examples, in order to select the desired version:
|
||||
|
||||
pkg_resources.require("%(name)s") # latest installed version
|
||||
pkg_resources.require("%(name)s==%(version)s") # this exact version
|
||||
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
|
||||
""").lstrip()
|
||||
|
||||
__id_warning = textwrap.dedent("""
|
||||
Note also that the installation directory must be on sys.path at runtime for
|
||||
this to work. (e.g. by being the application's script directory, by being on
|
||||
PYTHONPATH, or by being added to sys.path by your code.)
|
||||
""")
|
||||
|
||||
def installation_report(self, req, dist, what="Installed"):
|
||||
"""Helpful installation message for display to package users"""
|
||||
msg = "\n%(what)s %(eggloc)s%(extras)s"
|
||||
if self.multi_version and not self.no_report:
|
||||
msg += """
|
||||
|
||||
Because this distribution was installed --multi-version, before you can
|
||||
import modules from this package in an application, you will need to
|
||||
'import pkg_resources' and then use a 'require()' call similar to one of
|
||||
these examples, in order to select the desired version:
|
||||
|
||||
pkg_resources.require("%(name)s") # latest installed version
|
||||
pkg_resources.require("%(name)s==%(version)s") # this exact version
|
||||
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
|
||||
"""
|
||||
msg += '\n' + self.__mv_warning
|
||||
if self.install_dir not in map(normalize_path, sys.path):
|
||||
msg += """
|
||||
msg += '\n' + self.__id_warning
|
||||
|
||||
Note also that the installation directory must be on sys.path at runtime for
|
||||
this to work. (e.g. by being the application's script directory, by being on
|
||||
PYTHONPATH, or by being added to sys.path by your code.)
|
||||
"""
|
||||
eggloc = dist.location
|
||||
name = dist.project_name
|
||||
version = dist.version
|
||||
extras = '' # TODO: self.report_extras(req, dist)
|
||||
return msg % locals()
|
||||
|
||||
__editable_msg = textwrap.dedent("""
|
||||
Extracted editable version of %(spec)s to %(dirname)s
|
||||
|
||||
If it uses setuptools in its setup script, you can activate it in
|
||||
"development" mode by going to that directory and running::
|
||||
|
||||
%(python)s setup.py develop
|
||||
|
||||
See the setuptools documentation for the "develop" command for more info.
|
||||
""").lstrip()
|
||||
|
||||
def report_editable(self, spec, setup_script):
|
||||
dirname = os.path.dirname(setup_script)
|
||||
python = sys.executable
|
||||
return """\nExtracted editable version of %(spec)s to %(dirname)s
|
||||
|
||||
If it uses setuptools in its setup script, you can activate it in
|
||||
"development" mode by going to that directory and running::
|
||||
|
||||
%(python)s setup.py develop
|
||||
|
||||
See the setuptools documentation for the "develop" command for more info.
|
||||
""" % locals()
|
||||
return '\n' + self.__editable_msg % locals()
|
||||
|
||||
def run_setup(self, setup_script, setup_base, args):
|
||||
sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
|
||||
|
|
@ -1038,8 +1063,7 @@ See the setuptools documentation for the "develop" command for more info.
|
|||
)
|
||||
try:
|
||||
run_setup(setup_script, args)
|
||||
except SystemExit:
|
||||
v = sys.exc_info()[1]
|
||||
except SystemExit as v:
|
||||
raise DistutilsError("Setup script exited with %s" % (v.args[0],))
|
||||
|
||||
def build_and_install(self, setup_script, setup_base):
|
||||
|
|
@ -1152,7 +1176,7 @@ See the setuptools documentation for the "develop" command for more info.
|
|||
chmod(f, mode)
|
||||
|
||||
def byte_compile(self, to_compile):
|
||||
if _dont_write_bytecode:
|
||||
if sys.dont_write_bytecode:
|
||||
self.warn('byte-compiling is disabled, skipping.')
|
||||
return
|
||||
|
||||
|
|
@ -1171,35 +1195,38 @@ See the setuptools documentation for the "develop" command for more info.
|
|||
finally:
|
||||
log.set_verbosity(self.verbose) # restore original verbosity
|
||||
|
||||
__no_default_msg = textwrap.dedent("""
|
||||
bad install directory or PYTHONPATH
|
||||
|
||||
You are attempting to install a package to a directory that is not
|
||||
on PYTHONPATH and which Python does not read ".pth" files from. The
|
||||
installation directory you specified (via --install-dir, --prefix, or
|
||||
the distutils default setting) was:
|
||||
|
||||
%s
|
||||
|
||||
and your PYTHONPATH environment variable currently contains:
|
||||
|
||||
%r
|
||||
|
||||
Here are some of your options for correcting the problem:
|
||||
|
||||
* You can choose a different installation directory, i.e., one that is
|
||||
on PYTHONPATH or supports .pth files
|
||||
|
||||
* You can add the installation directory to the PYTHONPATH environment
|
||||
variable. (It must then also be on PYTHONPATH whenever you run
|
||||
Python and want to use the package(s) you are installing.)
|
||||
|
||||
* You can set up the installation directory to support ".pth" files by
|
||||
using one of the approaches described here:
|
||||
|
||||
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
|
||||
|
||||
Please make the appropriate changes for your system and try again.""").lstrip()
|
||||
|
||||
def no_default_version_msg(self):
|
||||
template = """bad install directory or PYTHONPATH
|
||||
|
||||
You are attempting to install a package to a directory that is not
|
||||
on PYTHONPATH and which Python does not read ".pth" files from. The
|
||||
installation directory you specified (via --install-dir, --prefix, or
|
||||
the distutils default setting) was:
|
||||
|
||||
%s
|
||||
|
||||
and your PYTHONPATH environment variable currently contains:
|
||||
|
||||
%r
|
||||
|
||||
Here are some of your options for correcting the problem:
|
||||
|
||||
* You can choose a different installation directory, i.e., one that is
|
||||
on PYTHONPATH or supports .pth files
|
||||
|
||||
* You can add the installation directory to the PYTHONPATH environment
|
||||
variable. (It must then also be on PYTHONPATH whenever you run
|
||||
Python and want to use the package(s) you are installing.)
|
||||
|
||||
* You can set up the installation directory to support ".pth" files by
|
||||
using one of the approaches described here:
|
||||
|
||||
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
|
||||
|
||||
Please make the appropriate changes for your system and try again."""
|
||||
template = self.__no_default_msg
|
||||
return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
|
||||
|
||||
def install_site_py(self):
|
||||
|
|
@ -1399,13 +1426,8 @@ def extract_wininst_cfg(dist_filename):
|
|||
{'version': '', 'target_version': ''})
|
||||
try:
|
||||
part = f.read(cfglen)
|
||||
# part is in bytes, but we need to read up to the first null
|
||||
# byte.
|
||||
if sys.version_info >= (2, 6):
|
||||
null_byte = bytes([0])
|
||||
else:
|
||||
null_byte = chr(0)
|
||||
config = part.split(null_byte, 1)[0]
|
||||
# Read up to the first null byte.
|
||||
config = part.split(b'\0', 1)[0]
|
||||
# Now the config is in bytes, but for RawConfigParser, it should
|
||||
# be text, so decode it.
|
||||
config = config.decode(sys.getfilesystemencoding())
|
||||
|
|
@ -1517,23 +1539,16 @@ class PthDistributions(Environment):
|
|||
if not self.dirty:
|
||||
return
|
||||
|
||||
data = '\n'.join(map(self.make_relative, self.paths))
|
||||
if data:
|
||||
rel_paths = list(map(self.make_relative, self.paths))
|
||||
if rel_paths:
|
||||
log.debug("Saving %s", self.filename)
|
||||
data = (
|
||||
"import sys; sys.__plen = len(sys.path)\n"
|
||||
"%s\n"
|
||||
"import sys; new=sys.path[sys.__plen:];"
|
||||
" del sys.path[sys.__plen:];"
|
||||
" p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
|
||||
" sys.__egginsert = p+len(new)\n"
|
||||
) % data
|
||||
lines = self._wrap_lines(rel_paths)
|
||||
data = '\n'.join(lines) + '\n'
|
||||
|
||||
if os.path.islink(self.filename):
|
||||
os.unlink(self.filename)
|
||||
f = open(self.filename, 'wt')
|
||||
f.write(data)
|
||||
f.close()
|
||||
with open(self.filename, 'wt') as f:
|
||||
f.write(data)
|
||||
|
||||
elif os.path.exists(self.filename):
|
||||
log.debug("Deleting empty %s", self.filename)
|
||||
|
|
@ -1541,12 +1556,20 @@ class PthDistributions(Environment):
|
|||
|
||||
self.dirty = False
|
||||
|
||||
@staticmethod
|
||||
def _wrap_lines(lines):
|
||||
return lines
|
||||
|
||||
def add(self, dist):
|
||||
"""Add `dist` to the distribution map"""
|
||||
if (dist.location not in self.paths and (
|
||||
dist.location not in self.sitedirs or
|
||||
dist.location == os.getcwd() # account for '.' being in PYTHONPATH
|
||||
)):
|
||||
new_path = (
|
||||
dist.location not in self.paths and (
|
||||
dist.location not in self.sitedirs or
|
||||
# account for '.' being in PYTHONPATH
|
||||
dist.location == os.getcwd()
|
||||
)
|
||||
)
|
||||
if new_path:
|
||||
self.paths.append(dist.location)
|
||||
self.dirty = True
|
||||
Environment.add(self, dist)
|
||||
|
|
@ -1574,6 +1597,34 @@ class PthDistributions(Environment):
|
|||
return path
|
||||
|
||||
|
||||
class RewritePthDistributions(PthDistributions):
|
||||
|
||||
@classmethod
|
||||
def _wrap_lines(cls, lines):
|
||||
yield cls.prelude
|
||||
for line in lines:
|
||||
yield line
|
||||
yield cls.postlude
|
||||
|
||||
_inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
|
||||
prelude = _inline("""
|
||||
import sys
|
||||
sys.__plen = len(sys.path)
|
||||
""")
|
||||
postlude = _inline("""
|
||||
import sys
|
||||
new = sys.path[sys.__plen:]
|
||||
del sys.path[sys.__plen:]
|
||||
p = getattr(sys, '__egginsert', 0)
|
||||
sys.path[p:p] = new
|
||||
sys.__egginsert = p + len(new)
|
||||
""")
|
||||
|
||||
|
||||
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite':
|
||||
PthDistributions = RewritePthDistributions
|
||||
|
||||
|
||||
def _first_line_re():
|
||||
"""
|
||||
Return a regular expression based on first_line_re suitable for matching
|
||||
|
|
@ -1586,33 +1637,6 @@ def _first_line_re():
|
|||
return re.compile(first_line_re.pattern.decode())
|
||||
|
||||
|
||||
def get_script_header(script_text, executable=sys_executable, wininst=False):
|
||||
"""Create a #! line, getting options (if any) from script_text"""
|
||||
first = (script_text + '\n').splitlines()[0]
|
||||
match = _first_line_re().match(first)
|
||||
options = ''
|
||||
if match:
|
||||
options = match.group(1) or ''
|
||||
if options:
|
||||
options = ' ' + options
|
||||
if wininst:
|
||||
executable = "python.exe"
|
||||
else:
|
||||
executable = nt_quote_arg(executable)
|
||||
hdr = "#!%(executable)s%(options)s\n" % locals()
|
||||
if not isascii(hdr):
|
||||
# Non-ascii path to sys.executable, use -x to prevent warnings
|
||||
if options:
|
||||
if options.strip().startswith('-'):
|
||||
options = ' -x' + options.strip()[1:]
|
||||
# else: punt, we can't do it, let the warning happen anyway
|
||||
else:
|
||||
options = ' -x'
|
||||
executable = fix_jython_executable(executable, options)
|
||||
hdr = "#!%(executable)s%(options)s\n" % locals()
|
||||
return hdr
|
||||
|
||||
|
||||
def auto_chmod(func, arg, exc):
|
||||
if func is os.remove and os.name == 'nt':
|
||||
chmod(arg, stat.S_IWRITE)
|
||||
|
|
@ -1811,9 +1835,8 @@ def is_python(text, filename='<string>'):
|
|||
def is_sh(executable):
|
||||
"""Determine if the specified executable is a .sh (contains a #! line)"""
|
||||
try:
|
||||
fp = open(executable)
|
||||
magic = fp.read(2)
|
||||
fp.close()
|
||||
with io.open(executable, encoding='latin-1') as fp:
|
||||
magic = fp.read(2)
|
||||
except (OSError, IOError):
|
||||
return executable
|
||||
return magic == '#!'
|
||||
|
|
@ -1821,36 +1844,7 @@ def is_sh(executable):
|
|||
|
||||
def nt_quote_arg(arg):
|
||||
"""Quote a command line argument according to Windows parsing rules"""
|
||||
|
||||
result = []
|
||||
needquote = False
|
||||
nb = 0
|
||||
|
||||
needquote = (" " in arg) or ("\t" in arg)
|
||||
if needquote:
|
||||
result.append('"')
|
||||
|
||||
for c in arg:
|
||||
if c == '\\':
|
||||
nb += 1
|
||||
elif c == '"':
|
||||
# double preceding backslashes, then add a \"
|
||||
result.append('\\' * (nb * 2) + '\\"')
|
||||
nb = 0
|
||||
else:
|
||||
if nb:
|
||||
result.append('\\' * nb)
|
||||
nb = 0
|
||||
result.append(c)
|
||||
|
||||
if nb:
|
||||
result.append('\\' * nb)
|
||||
|
||||
if needquote:
|
||||
result.append('\\' * nb) # double the trailing backslashes
|
||||
result.append('"')
|
||||
|
||||
return ''.join(result)
|
||||
return subprocess.list2cmdline([arg])
|
||||
|
||||
|
||||
def is_python_script(script_text, filename):
|
||||
|
|
@ -1879,31 +1873,130 @@ def chmod(path, mode):
|
|||
log.debug("changing mode of %s to %o", path, mode)
|
||||
try:
|
||||
_chmod(path, mode)
|
||||
except os.error:
|
||||
e = sys.exc_info()[1]
|
||||
except os.error as e:
|
||||
log.debug("chmod failed: %s", e)
|
||||
|
||||
|
||||
def fix_jython_executable(executable, options):
|
||||
if sys.platform.startswith('java') and is_sh(executable):
|
||||
# Workaround for Jython is not needed on Linux systems.
|
||||
import java
|
||||
warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2)
|
||||
|
||||
if java.lang.System.getProperty("os.name") == "Linux":
|
||||
return executable
|
||||
if not JythonCommandSpec.relevant():
|
||||
return executable
|
||||
|
||||
# Workaround Jython's sys.executable being a .sh (an invalid
|
||||
# shebang line interpreter)
|
||||
if options:
|
||||
cmd = CommandSpec.best().from_param(executable)
|
||||
cmd.install_options(options)
|
||||
return cmd.as_header().lstrip('#!').rstrip('\n')
|
||||
|
||||
|
||||
class CommandSpec(list):
|
||||
"""
|
||||
A command spec for a #! header, specified as a list of arguments akin to
|
||||
those passed to Popen.
|
||||
"""
|
||||
|
||||
options = []
|
||||
split_args = dict()
|
||||
|
||||
@classmethod
|
||||
def best(cls):
|
||||
"""
|
||||
Choose the best CommandSpec class based on environmental conditions.
|
||||
"""
|
||||
return cls if not JythonCommandSpec.relevant() else JythonCommandSpec
|
||||
|
||||
@classmethod
|
||||
def _sys_executable(cls):
|
||||
_default = os.path.normpath(sys.executable)
|
||||
return os.environ.get('__PYVENV_LAUNCHER__', _default)
|
||||
|
||||
@classmethod
|
||||
def from_param(cls, param):
|
||||
"""
|
||||
Construct a CommandSpec from a parameter to build_scripts, which may
|
||||
be None.
|
||||
"""
|
||||
if isinstance(param, cls):
|
||||
return param
|
||||
if isinstance(param, list):
|
||||
return cls(param)
|
||||
if param is None:
|
||||
return cls.from_environment()
|
||||
# otherwise, assume it's a string.
|
||||
return cls.from_string(param)
|
||||
|
||||
@classmethod
|
||||
def from_environment(cls):
|
||||
return cls([cls._sys_executable()])
|
||||
|
||||
@classmethod
|
||||
def from_string(cls, string):
|
||||
"""
|
||||
Construct a command spec from a simple string representing a command
|
||||
line parseable by shlex.split.
|
||||
"""
|
||||
items = shlex.split(string, **cls.split_args)
|
||||
return cls(items)
|
||||
|
||||
def install_options(self, script_text):
|
||||
self.options = shlex.split(self._extract_options(script_text))
|
||||
cmdline = subprocess.list2cmdline(self)
|
||||
if not isascii(cmdline):
|
||||
self.options[:0] = ['-x']
|
||||
|
||||
@staticmethod
|
||||
def _extract_options(orig_script):
|
||||
"""
|
||||
Extract any options from the first line of the script.
|
||||
"""
|
||||
first = (orig_script + '\n').splitlines()[0]
|
||||
match = _first_line_re().match(first)
|
||||
options = match.group(1) or '' if match else ''
|
||||
return options.strip()
|
||||
|
||||
def as_header(self):
|
||||
return self._render(self + list(self.options))
|
||||
|
||||
@staticmethod
|
||||
def _render(items):
|
||||
cmdline = subprocess.list2cmdline(items)
|
||||
return '#!' + cmdline + '\n'
|
||||
|
||||
# For pbr compat; will be removed in a future version.
|
||||
sys_executable = CommandSpec._sys_executable()
|
||||
|
||||
|
||||
class WindowsCommandSpec(CommandSpec):
|
||||
split_args = dict(posix=False)
|
||||
|
||||
|
||||
class JythonCommandSpec(CommandSpec):
|
||||
@classmethod
|
||||
def relevant(cls):
|
||||
return (
|
||||
sys.platform.startswith('java')
|
||||
and
|
||||
__import__('java').lang.System.getProperty('os.name') != 'Linux'
|
||||
)
|
||||
|
||||
def as_header(self):
|
||||
"""
|
||||
Workaround Jython's sys.executable being a .sh (an invalid
|
||||
shebang line interpreter)
|
||||
"""
|
||||
if not is_sh(self[0]):
|
||||
return super(JythonCommandSpec, self).as_header()
|
||||
|
||||
if self.options:
|
||||
# Can't apply the workaround, leave it broken
|
||||
log.warn(
|
||||
"WARNING: Unable to adapt shebang line for Jython,"
|
||||
" the following script is NOT executable\n"
|
||||
" see http://bugs.jython.org/issue1112 for"
|
||||
" more information.")
|
||||
else:
|
||||
return '/usr/bin/env %s' % executable
|
||||
return executable
|
||||
return super(JythonCommandSpec, self).as_header()
|
||||
|
||||
items = ['/usr/bin/env'] + self + list(self.options)
|
||||
return self._render(items)
|
||||
|
||||
|
||||
class ScriptWriter(object):
|
||||
|
|
@ -1924,39 +2017,92 @@ class ScriptWriter(object):
|
|||
)
|
||||
""").lstrip()
|
||||
|
||||
command_spec_class = CommandSpec
|
||||
|
||||
@classmethod
|
||||
def get_script_args(cls, dist, executable=sys_executable, wininst=False):
|
||||
def get_script_args(cls, dist, executable=None, wininst=False):
|
||||
# for backward compatibility
|
||||
warnings.warn("Use get_args", DeprecationWarning)
|
||||
writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
|
||||
header = cls.get_script_header("", executable, wininst)
|
||||
return writer.get_args(dist, header)
|
||||
|
||||
@classmethod
|
||||
def get_script_header(cls, script_text, executable=None, wininst=False):
|
||||
# for backward compatibility
|
||||
warnings.warn("Use get_header", DeprecationWarning)
|
||||
if wininst:
|
||||
executable = "python.exe"
|
||||
cmd = cls.command_spec_class.best().from_param(executable)
|
||||
cmd.install_options(script_text)
|
||||
return cmd.as_header()
|
||||
|
||||
@classmethod
|
||||
def get_args(cls, dist, header=None):
|
||||
"""
|
||||
Yield write_script() argument tuples for a distribution's entrypoints
|
||||
Yield write_script() argument tuples for a distribution's
|
||||
console_scripts and gui_scripts entry points.
|
||||
"""
|
||||
gen_class = cls.get_writer(wininst)
|
||||
if header is None:
|
||||
header = cls.get_header()
|
||||
spec = str(dist.as_requirement())
|
||||
header = get_script_header("", executable, wininst)
|
||||
for type_ in 'console', 'gui':
|
||||
group = type_ + '_scripts'
|
||||
for name, ep in dist.get_entry_map(group).items():
|
||||
script_text = gen_class.template % locals()
|
||||
for res in gen_class._get_script_args(type_, name, header,
|
||||
script_text):
|
||||
cls._ensure_safe_name(name)
|
||||
script_text = cls.template % locals()
|
||||
args = cls._get_script_args(type_, name, header, script_text)
|
||||
for res in args:
|
||||
yield res
|
||||
|
||||
@staticmethod
|
||||
def _ensure_safe_name(name):
|
||||
"""
|
||||
Prevent paths in *_scripts entry point names.
|
||||
"""
|
||||
has_path_sep = re.search(r'[\\/]', name)
|
||||
if has_path_sep:
|
||||
raise ValueError("Path separators not allowed in script names")
|
||||
|
||||
@classmethod
|
||||
def get_writer(cls, force_windows):
|
||||
if force_windows or sys.platform == 'win32':
|
||||
return WindowsScriptWriter.get_writer()
|
||||
return cls
|
||||
# for backward compatibility
|
||||
warnings.warn("Use best", DeprecationWarning)
|
||||
return WindowsScriptWriter.best() if force_windows else cls.best()
|
||||
|
||||
@classmethod
|
||||
def best(cls):
|
||||
"""
|
||||
Select the best ScriptWriter for this environment.
|
||||
"""
|
||||
return WindowsScriptWriter.best() if sys.platform == 'win32' else cls
|
||||
|
||||
@classmethod
|
||||
def _get_script_args(cls, type_, name, header, script_text):
|
||||
# Simply write the stub with no extension.
|
||||
yield (name, header + script_text)
|
||||
|
||||
@classmethod
|
||||
def get_header(cls, script_text="", executable=None):
|
||||
"""Create a #! line, getting options (if any) from script_text"""
|
||||
cmd = cls.command_spec_class.best().from_param(executable)
|
||||
cmd.install_options(script_text)
|
||||
return cmd.as_header()
|
||||
|
||||
|
||||
class WindowsScriptWriter(ScriptWriter):
|
||||
command_spec_class = WindowsCommandSpec
|
||||
|
||||
@classmethod
|
||||
def get_writer(cls):
|
||||
# for backward compatibility
|
||||
warnings.warn("Use best", DeprecationWarning)
|
||||
return cls.best()
|
||||
|
||||
@classmethod
|
||||
def best(cls):
|
||||
"""
|
||||
Get a script writer suitable for Windows
|
||||
Select the best ScriptWriter suitable for Windows
|
||||
"""
|
||||
writer_lookup = dict(
|
||||
executable=WindowsExecutableLauncherWriter,
|
||||
|
|
@ -2031,6 +2177,7 @@ class WindowsExecutableLauncherWriter(WindowsScriptWriter):
|
|||
|
||||
# for backward-compatibility
|
||||
get_script_args = ScriptWriter.get_script_args
|
||||
get_script_header = ScriptWriter.get_script_header
|
||||
|
||||
|
||||
def get_win_launcher(type):
|
||||
|
|
@ -2114,39 +2261,41 @@ def bootstrap():
|
|||
def main(argv=None, **kw):
|
||||
from setuptools import setup
|
||||
from setuptools.dist import Distribution
|
||||
import distutils.core
|
||||
|
||||
USAGE = """\
|
||||
usage: %(script)s [options] requirement_or_url ...
|
||||
or: %(script)s --help
|
||||
"""
|
||||
class DistributionWithoutHelpCommands(Distribution):
|
||||
common_usage = ""
|
||||
|
||||
def _show_help(self, *args, **kw):
|
||||
with _patch_usage():
|
||||
Distribution._show_help(self, *args, **kw)
|
||||
|
||||
if argv is None:
|
||||
argv = sys.argv[1:]
|
||||
|
||||
with _patch_usage():
|
||||
setup(
|
||||
script_args=['-q', 'easy_install', '-v'] + argv,
|
||||
script_name=sys.argv[0] or 'easy_install',
|
||||
distclass=DistributionWithoutHelpCommands, **kw
|
||||
)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _patch_usage():
|
||||
import distutils.core
|
||||
USAGE = textwrap.dedent("""
|
||||
usage: %(script)s [options] requirement_or_url ...
|
||||
or: %(script)s --help
|
||||
""").lstrip()
|
||||
|
||||
def gen_usage(script_name):
|
||||
return USAGE % dict(
|
||||
script=os.path.basename(script_name),
|
||||
)
|
||||
|
||||
def with_ei_usage(f):
|
||||
old_gen_usage = distutils.core.gen_usage
|
||||
try:
|
||||
distutils.core.gen_usage = gen_usage
|
||||
return f()
|
||||
finally:
|
||||
distutils.core.gen_usage = old_gen_usage
|
||||
|
||||
class DistributionWithoutHelpCommands(Distribution):
|
||||
common_usage = ""
|
||||
|
||||
def _show_help(self, *args, **kw):
|
||||
with_ei_usage(lambda: Distribution._show_help(self, *args, **kw))
|
||||
|
||||
if argv is None:
|
||||
argv = sys.argv[1:]
|
||||
|
||||
with_ei_usage(
|
||||
lambda: setup(
|
||||
script_args=['-q', 'easy_install', '-v'] + argv,
|
||||
script_name=sys.argv[0] or 'easy_install',
|
||||
distclass=DistributionWithoutHelpCommands, **kw
|
||||
)
|
||||
)
|
||||
saved = distutils.core.gen_usage
|
||||
distutils.core.gen_usage = gen_usage
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
distutils.core.gen_usage = saved
|
||||
|
|
|
|||
|
|
@ -6,20 +6,26 @@ from distutils.filelist import FileList as _FileList
|
|||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.errors
|
||||
import distutils.filelist
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
try:
|
||||
from setuptools_svn import svn_utils
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from setuptools import Command
|
||||
from setuptools.command.sdist import sdist
|
||||
from setuptools.compat import basestring, PY3, StringIO
|
||||
from setuptools import svn_utils
|
||||
from setuptools.command.sdist import walk_revctrl
|
||||
from pkg_resources import (
|
||||
parse_requirements, safe_name, parse_version,
|
||||
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
||||
import setuptools.unicode_utils as unicode_utils
|
||||
|
||||
from pkg_resources import packaging
|
||||
|
||||
class egg_info(Command):
|
||||
description = "create a distribution's .egg-info directory"
|
||||
|
|
@ -68,10 +74,15 @@ class egg_info(Command):
|
|||
self.vtags = self.tags()
|
||||
self.egg_version = self.tagged_version()
|
||||
|
||||
parsed_version = parse_version(self.egg_version)
|
||||
|
||||
try:
|
||||
is_version = isinstance(parsed_version, packaging.version.Version)
|
||||
spec = (
|
||||
"%s==%s" if is_version else "%s===%s"
|
||||
)
|
||||
list(
|
||||
parse_requirements('%s==%s' % (self.egg_name,
|
||||
self.egg_version))
|
||||
parse_requirements(spec % (self.egg_name, self.egg_version))
|
||||
)
|
||||
except ValueError:
|
||||
raise distutils.errors.DistutilsOptionError(
|
||||
|
|
@ -157,7 +168,8 @@ class egg_info(Command):
|
|||
self.mkpath(self.egg_info)
|
||||
installer = self.distribution.fetch_build_egg
|
||||
for ep in iter_entry_points('egg_info.writers'):
|
||||
writer = ep.load(installer=installer)
|
||||
ep.require(installer=installer)
|
||||
writer = ep.resolve()
|
||||
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
|
||||
|
||||
# Get rid of native_libs.txt if it was put there by older bdist_egg
|
||||
|
|
@ -183,6 +195,8 @@ class egg_info(Command):
|
|||
|
||||
@staticmethod
|
||||
def get_svn_revision():
|
||||
if 'svn_utils' not in globals():
|
||||
return "0"
|
||||
return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
|
||||
|
||||
def find_sources(self):
|
||||
|
|
@ -312,8 +326,33 @@ class manifest_maker(sdist):
|
|||
elif os.path.exists(self.manifest):
|
||||
self.read_manifest()
|
||||
ei_cmd = self.get_finalized_command('egg_info')
|
||||
self._add_egg_info(cmd=ei_cmd)
|
||||
self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
|
||||
|
||||
def _add_egg_info(self, cmd):
|
||||
"""
|
||||
Add paths for egg-info files for an external egg-base.
|
||||
|
||||
The egg-info files are written to egg-base. If egg-base is
|
||||
outside the current working directory, this method
|
||||
searchs the egg-base directory for files to include
|
||||
in the manifest. Uses distutils.filelist.findall (which is
|
||||
really the version monkeypatched in by setuptools/__init__.py)
|
||||
to perform the search.
|
||||
|
||||
Since findall records relative paths, prefix the returned
|
||||
paths with cmd.egg_base, so add_default's include_pattern call
|
||||
(which is looking for the absolute cmd.egg_info) will match
|
||||
them.
|
||||
"""
|
||||
if cmd.egg_base == os.curdir:
|
||||
# egg-info files were already added by something else
|
||||
return
|
||||
|
||||
discovered = distutils.filelist.findall(cmd.egg_base)
|
||||
resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
|
||||
self.filelist.allfiles.extend(resolved)
|
||||
|
||||
def prune_file_list(self):
|
||||
build = self.get_finalized_command('build')
|
||||
base_dir = self.distribution.get_fullname()
|
||||
|
|
@ -382,6 +421,12 @@ def write_requirements(cmd, basename, filename):
|
|||
cmd.write_or_delete_file("requirements", filename, data.getvalue())
|
||||
|
||||
|
||||
def write_setup_requirements(cmd, basename, filename):
|
||||
data = StringIO()
|
||||
_write_requirements(data, cmd.distribution.setup_requires)
|
||||
cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
|
||||
|
||||
|
||||
def write_toplevel_names(cmd, basename, filename):
|
||||
pkgs = dict.fromkeys(
|
||||
[
|
||||
|
|
|
|||
|
|
@ -13,8 +13,7 @@ class install_scripts(orig.install_scripts):
|
|||
self.no_ep = False
|
||||
|
||||
def run(self):
|
||||
from setuptools.command.easy_install import get_script_args
|
||||
from setuptools.command.easy_install import sys_executable
|
||||
import setuptools.command.easy_install as ei
|
||||
|
||||
self.run_command("egg_info")
|
||||
if self.distribution.scripts:
|
||||
|
|
@ -31,11 +30,17 @@ class install_scripts(orig.install_scripts):
|
|||
ei_cmd.egg_name, ei_cmd.egg_version,
|
||||
)
|
||||
bs_cmd = self.get_finalized_command('build_scripts')
|
||||
executable = getattr(bs_cmd, 'executable', sys_executable)
|
||||
is_wininst = getattr(
|
||||
self.get_finalized_command("bdist_wininst"), '_is_running', False
|
||||
)
|
||||
for args in get_script_args(dist, executable, is_wininst):
|
||||
exec_param = getattr(bs_cmd, 'executable', None)
|
||||
bw_cmd = self.get_finalized_command("bdist_wininst")
|
||||
is_wininst = getattr(bw_cmd, '_is_running', False)
|
||||
writer = ei.ScriptWriter
|
||||
if is_wininst:
|
||||
exec_param = "python.exe"
|
||||
writer = ei.WindowsScriptWriter
|
||||
# resolve the writer to the environment
|
||||
writer = writer.best()
|
||||
cmd = writer.command_spec_class.best().from_param(exec_param)
|
||||
for args in writer.get_args(dist, cmd.as_header()):
|
||||
self.write_script(*args)
|
||||
|
||||
def write_script(self, script_name, contents, mode="t", *ignored):
|
||||
|
|
|
|||
|
|
@ -1,19 +1,17 @@
|
|||
from glob import glob
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import distutils.command.sdist as orig
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from setuptools import svn_utils
|
||||
from setuptools.compat import PY3
|
||||
from setuptools.utils import cs_path_exists
|
||||
|
||||
import pkg_resources
|
||||
|
||||
READMES = ('README', 'README.rst', 'README.txt')
|
||||
READMES = 'README', 'README.rst', 'README.txt'
|
||||
|
||||
_default_revctrl = list
|
||||
|
||||
def walk_revctrl(dirname=''):
|
||||
"""Find all files under revision control"""
|
||||
|
|
@ -22,60 +20,6 @@ def walk_revctrl(dirname=''):
|
|||
yield item
|
||||
|
||||
|
||||
# TODO will need test case
|
||||
class re_finder(object):
|
||||
"""
|
||||
Finder that locates files based on entries in a file matched by a
|
||||
regular expression.
|
||||
"""
|
||||
|
||||
def __init__(self, path, pattern, postproc=lambda x: x):
|
||||
self.pattern = pattern
|
||||
self.postproc = postproc
|
||||
self.entries_path = convert_path(path)
|
||||
|
||||
def _finder(self, dirname, filename):
|
||||
f = open(filename, 'rU')
|
||||
try:
|
||||
data = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
for match in self.pattern.finditer(data):
|
||||
path = match.group(1)
|
||||
# postproc was formerly used when the svn finder
|
||||
# was an re_finder for calling unescape
|
||||
path = self.postproc(path)
|
||||
yield svn_utils.joinpath(dirname, path)
|
||||
|
||||
def find(self, dirname=''):
|
||||
path = svn_utils.joinpath(dirname, self.entries_path)
|
||||
|
||||
if not os.path.isfile(path):
|
||||
# entries file doesn't exist
|
||||
return
|
||||
for path in self._finder(dirname, path):
|
||||
if os.path.isfile(path):
|
||||
yield path
|
||||
elif os.path.isdir(path):
|
||||
for item in self.find(path):
|
||||
yield item
|
||||
|
||||
__call__ = find
|
||||
|
||||
|
||||
def _default_revctrl(dirname=''):
|
||||
'Primary svn_cvs entry point'
|
||||
for finder in finders:
|
||||
for item in finder(dirname):
|
||||
yield item
|
||||
|
||||
|
||||
finders = [
|
||||
re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
|
||||
svn_utils.svn_finder,
|
||||
]
|
||||
|
||||
|
||||
class sdist(orig.sdist):
|
||||
"""Smart sdist that finds anything supported by revision control"""
|
||||
|
||||
|
|
@ -126,7 +70,8 @@ class sdist(orig.sdist):
|
|||
try:
|
||||
orig.sdist.read_template(self)
|
||||
except:
|
||||
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
|
||||
_, _, tb = sys.exc_info()
|
||||
tb.tb_next.tb_frame.f_locals['template'].close()
|
||||
raise
|
||||
|
||||
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
from distutils.errors import DistutilsOptionError
|
||||
from unittest import TestLoader
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
from pkg_resources import (resource_listdir, resource_exists, normalize_path,
|
||||
|
|
@ -12,7 +11,7 @@ from setuptools.py31compat import unittest_main
|
|||
|
||||
|
||||
class ScanningLoader(TestLoader):
|
||||
def loadTestsFromModule(self, module):
|
||||
def loadTestsFromModule(self, module, pattern=None):
|
||||
"""Return a suite of all tests cases contained in the given module
|
||||
|
||||
If the module is a package, load tests from all the modules in it.
|
||||
|
|
@ -62,20 +61,16 @@ class test(Command):
|
|||
|
||||
def finalize_options(self):
|
||||
|
||||
if self.test_suite and self.test_module:
|
||||
msg = "You may specify a module or a suite, but not both"
|
||||
raise DistutilsOptionError(msg)
|
||||
|
||||
if self.test_suite is None:
|
||||
if self.test_module is None:
|
||||
self.test_suite = self.distribution.test_suite
|
||||
else:
|
||||
self.test_suite = self.test_module + ".test_suite"
|
||||
elif self.test_module:
|
||||
raise DistutilsOptionError(
|
||||
"You may specify a module or a suite, but not both"
|
||||
)
|
||||
|
||||
self.test_args = [self.test_suite]
|
||||
|
||||
if self.verbose:
|
||||
self.test_args.insert(0, '--verbose')
|
||||
if self.test_loader is None:
|
||||
self.test_loader = getattr(self.distribution, 'test_loader', None)
|
||||
if self.test_loader is None:
|
||||
|
|
@ -83,6 +78,16 @@ class test(Command):
|
|||
if self.test_runner is None:
|
||||
self.test_runner = getattr(self.distribution, 'test_runner', None)
|
||||
|
||||
@property
|
||||
def test_args(self):
|
||||
return list(self._test_args())
|
||||
|
||||
def _test_args(self):
|
||||
if self.verbose:
|
||||
yield '--verbose'
|
||||
if self.test_suite:
|
||||
yield self.test_suite
|
||||
|
||||
def with_project_on_sys_path(self, func):
|
||||
with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
|
||||
|
||||
|
|
@ -133,20 +138,19 @@ class test(Command):
|
|||
if self.distribution.tests_require:
|
||||
self.distribution.fetch_build_eggs(self.distribution.tests_require)
|
||||
|
||||
if self.test_suite:
|
||||
cmd = ' '.join(self.test_args)
|
||||
if self.dry_run:
|
||||
self.announce('skipping "unittest %s" (dry run)' % cmd)
|
||||
else:
|
||||
self.announce('running "unittest %s"' % cmd)
|
||||
self.with_project_on_sys_path(self.run_tests)
|
||||
cmd = ' '.join(self._argv)
|
||||
if self.dry_run:
|
||||
self.announce('skipping "%s" (dry run)' % cmd)
|
||||
else:
|
||||
self.announce('running "%s"' % cmd)
|
||||
self.with_project_on_sys_path(self.run_tests)
|
||||
|
||||
def run_tests(self):
|
||||
# Purge modules under test from sys.modules. The test loader will
|
||||
# re-import them from the build location. Required when 2to3 is used
|
||||
# with namespace packages.
|
||||
if PY3 and getattr(self.distribution, 'use_2to3', False):
|
||||
module = self.test_args[-1].split('.')[0]
|
||||
module = self.test_suite.split('.')[0]
|
||||
if module in _namespace_packages:
|
||||
del_modules = []
|
||||
if module in sys.modules:
|
||||
|
|
@ -158,11 +162,15 @@ class test(Command):
|
|||
list(map(sys.modules.__delitem__, del_modules))
|
||||
|
||||
unittest_main(
|
||||
None, None, [unittest.__file__] + self.test_args,
|
||||
None, None, self._argv,
|
||||
testLoader=self._resolve_as_ep(self.test_loader),
|
||||
testRunner=self._resolve_as_ep(self.test_runner),
|
||||
)
|
||||
|
||||
@property
|
||||
def _argv(self):
|
||||
return ['unittest'] + self.test_args
|
||||
|
||||
@staticmethod
|
||||
def _resolve_as_ep(val):
|
||||
"""
|
||||
|
|
@ -172,4 +180,4 @@ class test(Command):
|
|||
if val is None:
|
||||
return
|
||||
parsed = EntryPoint.parse("x=" + val)
|
||||
return parsed.load(require=False)()
|
||||
return parsed.resolve()()
|
||||
|
|
|
|||
|
|
@ -169,8 +169,7 @@ class upload_docs(upload):
|
|||
conn.putheader('Authorization', auth)
|
||||
conn.endheaders()
|
||||
conn.send(body)
|
||||
except socket.error:
|
||||
e = sys.exc_info()[1]
|
||||
except socket.error as e:
|
||||
self.announce(str(e), log.ERROR)
|
||||
return
|
||||
|
||||
|
|
|
|||
|
|
@ -15,8 +15,12 @@ from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
|
|||
|
||||
from setuptools.depends import Require
|
||||
from setuptools.compat import basestring, PY2
|
||||
from setuptools import windows_support
|
||||
import pkg_resources
|
||||
|
||||
packaging = pkg_resources.packaging
|
||||
|
||||
|
||||
def _get_unpatched(cls):
|
||||
"""Protect against re-patching the distutils if reloaded
|
||||
|
||||
|
|
@ -111,24 +115,26 @@ def check_extras(dist, attr, value):
|
|||
def assert_bool(dist, attr, value):
|
||||
"""Verify that value is True, False, 0, or 1"""
|
||||
if bool(value) != value:
|
||||
raise DistutilsSetupError(
|
||||
"%r must be a boolean value (got %r)" % (attr,value)
|
||||
)
|
||||
tmpl = "{attr!r} must be a boolean value (got {value!r})"
|
||||
raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
|
||||
|
||||
|
||||
def check_requirements(dist, attr, value):
|
||||
"""Verify that install_requires is a valid requirements list"""
|
||||
try:
|
||||
list(pkg_resources.parse_requirements(value))
|
||||
except (TypeError,ValueError):
|
||||
raise DistutilsSetupError(
|
||||
"%r must be a string or list of strings "
|
||||
"containing valid project/version requirement specifiers" % (attr,)
|
||||
except (TypeError, ValueError) as error:
|
||||
tmpl = (
|
||||
"{attr!r} must be a string or list of strings "
|
||||
"containing valid project/version requirement specifiers; {error}"
|
||||
)
|
||||
raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
|
||||
|
||||
def check_entry_points(dist, attr, value):
|
||||
"""Verify that entry_points map is parseable"""
|
||||
try:
|
||||
pkg_resources.EntryPoint.parse_map(value)
|
||||
except ValueError:
|
||||
e = sys.exc_info()[1]
|
||||
except ValueError as e:
|
||||
raise DistutilsSetupError(e)
|
||||
|
||||
def check_test_suite(dist, attr, value):
|
||||
|
|
@ -268,6 +274,26 @@ class Distribution(_Distribution):
|
|||
# Some people apparently take "version number" too literally :)
|
||||
self.metadata.version = str(self.metadata.version)
|
||||
|
||||
if self.metadata.version is not None:
|
||||
try:
|
||||
ver = packaging.version.Version(self.metadata.version)
|
||||
normalized_version = str(ver)
|
||||
if self.metadata.version != normalized_version:
|
||||
warnings.warn(
|
||||
"Normalizing '%s' to '%s'" % (
|
||||
self.metadata.version,
|
||||
normalized_version,
|
||||
)
|
||||
)
|
||||
self.metadata.version = normalized_version
|
||||
except (packaging.version.InvalidVersion, TypeError):
|
||||
warnings.warn(
|
||||
"The version specified (%r) is an invalid version, this "
|
||||
"may not work as expected with newer versions of "
|
||||
"setuptools, pip, and PyPI. Please see PEP 440 for more "
|
||||
"details." % self.metadata.version
|
||||
)
|
||||
|
||||
def parse_command_line(self):
|
||||
"""Process features after parsing command line options"""
|
||||
result = _Distribution.parse_command_line(self)
|
||||
|
|
@ -305,6 +331,21 @@ class Distribution(_Distribution):
|
|||
else:
|
||||
self.convert_2to3_doctests = []
|
||||
|
||||
def get_egg_cache_dir(self):
|
||||
egg_cache_dir = os.path.join(os.curdir, '.eggs')
|
||||
if not os.path.exists(egg_cache_dir):
|
||||
os.mkdir(egg_cache_dir)
|
||||
windows_support.hide_file(egg_cache_dir)
|
||||
readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
|
||||
with open(readme_txt_filename, 'w') as f:
|
||||
f.write('This directory contains eggs that were downloaded '
|
||||
'by setuptools to build, test, and run plug-ins.\n\n')
|
||||
f.write('This directory caches those eggs to prevent '
|
||||
'repeated downloads.\n\n')
|
||||
f.write('However, it is safe to delete this directory.\n\n')
|
||||
|
||||
return egg_cache_dir
|
||||
|
||||
def fetch_build_egg(self, req):
|
||||
"""Fetch an egg needed for building"""
|
||||
|
||||
|
|
@ -328,8 +369,9 @@ class Distribution(_Distribution):
|
|||
if 'find_links' in opts:
|
||||
links = opts['find_links'][1].split() + links
|
||||
opts['find_links'] = ('setup', links)
|
||||
install_dir = self.get_egg_cache_dir()
|
||||
cmd = easy_install(
|
||||
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
|
||||
dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
|
||||
always_copy=False, build_directory=None, editable=False,
|
||||
upgrade=False, multi_version=True, no_report=True, user=False
|
||||
)
|
||||
|
|
@ -393,7 +435,8 @@ class Distribution(_Distribution):
|
|||
def print_commands(self):
|
||||
for ep in pkg_resources.iter_entry_points('distutils.commands'):
|
||||
if ep.name not in self.cmdclass:
|
||||
cmdclass = ep.load(False) # don't require extras, we're not running
|
||||
# don't require extras as the commands won't be invoked
|
||||
cmdclass = ep.resolve()
|
||||
self.cmdclass[ep.name] = cmdclass
|
||||
return _Distribution.print_commands(self)
|
||||
|
||||
|
|
|
|||
|
|
@ -12,35 +12,33 @@ _Extension = _get_unpatched(distutils.core.Extension)
|
|||
|
||||
msvc9_support.patch_for_specialized_compiler()
|
||||
|
||||
def have_pyrex():
|
||||
def _have_cython():
|
||||
"""
|
||||
Return True if Cython or Pyrex can be imported.
|
||||
Return True if Cython can be imported.
|
||||
"""
|
||||
pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext'
|
||||
for pyrex_impl in pyrex_impls:
|
||||
try:
|
||||
# from (pyrex_impl) import build_ext
|
||||
__import__(pyrex_impl, fromlist=['build_ext']).build_ext
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
cython_impl = 'Cython.Distutils.build_ext',
|
||||
try:
|
||||
# from (cython_impl) import build_ext
|
||||
__import__(cython_impl, fromlist=['build_ext']).build_ext
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
return False
|
||||
|
||||
# for compatibility
|
||||
have_pyrex = _have_cython
|
||||
|
||||
|
||||
class Extension(_Extension):
|
||||
"""Extension that uses '.c' files in place of '.pyx' files"""
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
_Extension.__init__(self, *args, **kw)
|
||||
self._convert_pyx_sources_to_lang()
|
||||
|
||||
def _convert_pyx_sources_to_lang(self):
|
||||
"""
|
||||
Replace sources with .pyx extensions to sources with the target
|
||||
language extension. This mechanism allows language authors to supply
|
||||
pre-converted sources but to prefer the .pyx sources.
|
||||
"""
|
||||
if have_pyrex():
|
||||
if _have_cython():
|
||||
# the build has Cython, so allow it to compile the .pyx files
|
||||
return
|
||||
lang = self.language or ''
|
||||
|
|
|
|||
0
Shared/lib/python3.4/site-packages/setuptools/gui-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui-64.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui-64.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui-arm-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui-arm-32.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui.exe
Executable file → Normal file
0
Shared/lib/python3.4/site-packages/setuptools/gui.exe
Executable file → Normal file
|
|
@ -1,5 +1,3 @@
|
|||
import sys
|
||||
|
||||
try:
|
||||
import distutils.msvc9compiler
|
||||
except ImportError:
|
||||
|
|
@ -29,13 +27,15 @@ def patch_for_specialized_compiler():
|
|||
def find_vcvarsall(version):
|
||||
Reg = distutils.msvc9compiler.Reg
|
||||
VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
|
||||
key = VC_BASE % ('', version)
|
||||
try:
|
||||
# Per-user installs register the compiler path here
|
||||
productdir = Reg.get_value(VC_BASE % ('', version), "installdir")
|
||||
productdir = Reg.get_value(key, "installdir")
|
||||
except KeyError:
|
||||
try:
|
||||
# All-user installs on a 64-bit system register here
|
||||
productdir = Reg.get_value(VC_BASE % ('Wow6432Node\\', version), "installdir")
|
||||
key = VC_BASE % ('Wow6432Node\\', version)
|
||||
productdir = Reg.get_value(key, "installdir")
|
||||
except KeyError:
|
||||
productdir = None
|
||||
|
||||
|
|
@ -50,8 +50,7 @@ def find_vcvarsall(version):
|
|||
def query_vcvarsall(version, *args, **kwargs):
|
||||
try:
|
||||
return unpatched['query_vcvarsall'](version, *args, **kwargs)
|
||||
except distutils.errors.DistutilsPlatformError:
|
||||
exc = sys.exc_info()[1]
|
||||
except distutils.errors.DistutilsPlatformError as exc:
|
||||
if exc and "vcvarsall.bat" in exc.args[0]:
|
||||
message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
|
||||
if int(version) == 9:
|
||||
|
|
|
|||
|
|
@ -138,10 +138,9 @@ def interpret_distro_name(
|
|||
# versions in distribution archive names (sdist and bdist).
|
||||
|
||||
parts = basename.split('-')
|
||||
if not py_version:
|
||||
for i,p in enumerate(parts[2:]):
|
||||
if len(p)==5 and p.startswith('py2.'):
|
||||
return # It's a bdist_dumb, not an sdist -- bail out
|
||||
if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]):
|
||||
# it is a bdist_dumb, not an sdist -- bail out
|
||||
return
|
||||
|
||||
for p in range(1,len(parts)+1):
|
||||
yield Distribution(
|
||||
|
|
@ -699,25 +698,21 @@ class PackageIndex(Environment):
|
|||
return local_open(url)
|
||||
try:
|
||||
return open_with_auth(url, self.opener)
|
||||
except (ValueError, httplib.InvalidURL):
|
||||
v = sys.exc_info()[1]
|
||||
except (ValueError, httplib.InvalidURL) as v:
|
||||
msg = ' '.join([str(arg) for arg in v.args])
|
||||
if warning:
|
||||
self.warn(warning, msg)
|
||||
else:
|
||||
raise DistutilsError('%s %s' % (url, msg))
|
||||
except urllib2.HTTPError:
|
||||
v = sys.exc_info()[1]
|
||||
except urllib2.HTTPError as v:
|
||||
return v
|
||||
except urllib2.URLError:
|
||||
v = sys.exc_info()[1]
|
||||
except urllib2.URLError as v:
|
||||
if warning:
|
||||
self.warn(warning, v.reason)
|
||||
else:
|
||||
raise DistutilsError("Download error for %s: %s"
|
||||
% (url, v.reason))
|
||||
except httplib.BadStatusLine:
|
||||
v = sys.exc_info()[1]
|
||||
except httplib.BadStatusLine as v:
|
||||
if warning:
|
||||
self.warn(warning, v.line)
|
||||
else:
|
||||
|
|
@ -726,8 +721,7 @@ class PackageIndex(Environment):
|
|||
'down, %s' %
|
||||
(url, v.line)
|
||||
)
|
||||
except httplib.HTTPException:
|
||||
v = sys.exc_info()[1]
|
||||
except httplib.HTTPException as v:
|
||||
if warning:
|
||||
self.warn(warning, v)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -5,10 +5,12 @@ import operator
|
|||
import functools
|
||||
import itertools
|
||||
import re
|
||||
import contextlib
|
||||
import pickle
|
||||
|
||||
import pkg_resources
|
||||
|
||||
if os.name == "java":
|
||||
if sys.platform.startswith('java'):
|
||||
import org.python.modules.posix.PosixModule as _os
|
||||
else:
|
||||
_os = sys.modules[os.name]
|
||||
|
|
@ -20,6 +22,7 @@ _open = open
|
|||
from distutils.errors import DistutilsError
|
||||
from pkg_resources import working_set
|
||||
|
||||
from setuptools import compat
|
||||
from setuptools.compat import builtins
|
||||
|
||||
__all__ = [
|
||||
|
|
@ -31,31 +34,197 @@ def _execfile(filename, globals, locals=None):
|
|||
Python 3 implementation of execfile.
|
||||
"""
|
||||
mode = 'rb'
|
||||
# Python 2.6 compile requires LF for newlines, so use deprecated
|
||||
# Universal newlines support.
|
||||
if sys.version_info < (2, 7):
|
||||
mode += 'U'
|
||||
with open(filename, mode) as stream:
|
||||
script = stream.read()
|
||||
# compile() function in Python 2.6 and 3.1 requires LF line endings.
|
||||
if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2):
|
||||
script = script.replace(b'\r\n', b'\n')
|
||||
script = script.replace(b'\r', b'\n')
|
||||
if locals is None:
|
||||
locals = globals
|
||||
code = compile(script, filename, 'exec')
|
||||
exec(code, globals, locals)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def save_argv(repl=None):
|
||||
saved = sys.argv[:]
|
||||
if repl is not None:
|
||||
sys.argv[:] = repl
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
sys.argv[:] = saved
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def save_path():
|
||||
saved = sys.path[:]
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
sys.path[:] = saved
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def override_temp(replacement):
|
||||
"""
|
||||
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
|
||||
"""
|
||||
if not os.path.isdir(replacement):
|
||||
os.makedirs(replacement)
|
||||
|
||||
saved = tempfile.tempdir
|
||||
|
||||
tempfile.tempdir = replacement
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
tempfile.tempdir = saved
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pushd(target):
|
||||
saved = os.getcwd()
|
||||
os.chdir(target)
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
os.chdir(saved)
|
||||
|
||||
|
||||
class UnpickleableException(Exception):
|
||||
"""
|
||||
An exception representing another Exception that could not be pickled.
|
||||
"""
|
||||
@classmethod
|
||||
def dump(cls, type, exc):
|
||||
"""
|
||||
Always return a dumped (pickled) type and exc. If exc can't be pickled,
|
||||
wrap it in UnpickleableException first.
|
||||
"""
|
||||
try:
|
||||
return pickle.dumps(type), pickle.dumps(exc)
|
||||
except Exception:
|
||||
return cls.dump(cls, cls(repr(exc)))
|
||||
|
||||
|
||||
class ExceptionSaver:
|
||||
"""
|
||||
A Context Manager that will save an exception, serialized, and restore it
|
||||
later.
|
||||
"""
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, type, exc, tb):
|
||||
if not exc:
|
||||
return
|
||||
|
||||
# dump the exception
|
||||
self._saved = UnpickleableException.dump(type, exc)
|
||||
self._tb = tb
|
||||
|
||||
# suppress the exception
|
||||
return True
|
||||
|
||||
def resume(self):
|
||||
"restore and re-raise any exception"
|
||||
|
||||
if '_saved' not in vars(self):
|
||||
return
|
||||
|
||||
type, exc = map(pickle.loads, self._saved)
|
||||
compat.reraise(type, exc, self._tb)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def save_modules():
|
||||
"""
|
||||
Context in which imported modules are saved.
|
||||
|
||||
Translates exceptions internal to the context into the equivalent exception
|
||||
outside the context.
|
||||
"""
|
||||
saved = sys.modules.copy()
|
||||
with ExceptionSaver() as saved_exc:
|
||||
yield saved
|
||||
|
||||
sys.modules.update(saved)
|
||||
# remove any modules imported since
|
||||
del_modules = (
|
||||
mod_name for mod_name in sys.modules
|
||||
if mod_name not in saved
|
||||
# exclude any encodings modules. See #285
|
||||
and not mod_name.startswith('encodings.')
|
||||
)
|
||||
_clear_modules(del_modules)
|
||||
|
||||
saved_exc.resume()
|
||||
|
||||
|
||||
def _clear_modules(module_names):
|
||||
for mod_name in list(module_names):
|
||||
del sys.modules[mod_name]
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def save_pkg_resources_state():
|
||||
saved = pkg_resources.__getstate__()
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
pkg_resources.__setstate__(saved)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def setup_context(setup_dir):
|
||||
temp_dir = os.path.join(setup_dir, 'temp')
|
||||
with save_pkg_resources_state():
|
||||
with save_modules():
|
||||
hide_setuptools()
|
||||
with save_path():
|
||||
with save_argv():
|
||||
with override_temp(temp_dir):
|
||||
with pushd(setup_dir):
|
||||
# ensure setuptools commands are available
|
||||
__import__('setuptools')
|
||||
yield
|
||||
|
||||
|
||||
def _needs_hiding(mod_name):
|
||||
"""
|
||||
>>> _needs_hiding('setuptools')
|
||||
True
|
||||
>>> _needs_hiding('pkg_resources')
|
||||
True
|
||||
>>> _needs_hiding('setuptools_plugin')
|
||||
False
|
||||
>>> _needs_hiding('setuptools.__init__')
|
||||
True
|
||||
>>> _needs_hiding('distutils')
|
||||
True
|
||||
"""
|
||||
pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)')
|
||||
return bool(pattern.match(mod_name))
|
||||
|
||||
|
||||
def hide_setuptools():
|
||||
"""
|
||||
Remove references to setuptools' modules from sys.modules to allow the
|
||||
invocation to import the most appropriate setuptools. This technique is
|
||||
necessary to avoid issues such as #315 where setuptools upgrading itself
|
||||
would fail to find a function declared in the metadata.
|
||||
"""
|
||||
modules = filter(_needs_hiding, sys.modules)
|
||||
_clear_modules(modules)
|
||||
|
||||
|
||||
def run_setup(setup_script, args):
|
||||
"""Run a distutils setup script, sandboxed in its directory"""
|
||||
old_dir = os.getcwd()
|
||||
save_argv = sys.argv[:]
|
||||
save_path = sys.path[:]
|
||||
setup_dir = os.path.abspath(os.path.dirname(setup_script))
|
||||
temp_dir = os.path.join(setup_dir,'temp')
|
||||
if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
|
||||
save_tmp = tempfile.tempdir
|
||||
save_modules = sys.modules.copy()
|
||||
pr_state = pkg_resources.__getstate__()
|
||||
try:
|
||||
tempfile.tempdir = temp_dir
|
||||
os.chdir(setup_dir)
|
||||
with setup_context(setup_dir):
|
||||
try:
|
||||
sys.argv[:] = [setup_script]+list(args)
|
||||
sys.path.insert(0, setup_dir)
|
||||
|
|
@ -66,26 +235,10 @@ def run_setup(setup_script, args):
|
|||
ns = dict(__file__=setup_script, __name__='__main__')
|
||||
_execfile(setup_script, ns)
|
||||
DirectorySandbox(setup_dir).run(runner)
|
||||
except SystemExit:
|
||||
v = sys.exc_info()[1]
|
||||
except SystemExit as v:
|
||||
if v.args and v.args[0]:
|
||||
raise
|
||||
# Normal exit, just return
|
||||
finally:
|
||||
pkg_resources.__setstate__(pr_state)
|
||||
sys.modules.update(save_modules)
|
||||
# remove any modules imported within the sandbox
|
||||
del_modules = [
|
||||
mod_name for mod_name in sys.modules
|
||||
if mod_name not in save_modules
|
||||
# exclude any encodings modules. See #285
|
||||
and not mod_name.startswith('encodings.')
|
||||
]
|
||||
list(map(sys.modules.__delitem__, del_modules))
|
||||
os.chdir(old_dir)
|
||||
sys.path[:] = save_path
|
||||
sys.argv[:] = save_argv
|
||||
tempfile.tempdir = save_tmp
|
||||
|
||||
|
||||
class AbstractSandbox:
|
||||
|
|
|
|||
|
|
@ -1,585 +0,0 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from distutils import log
|
||||
import xml.dom.pulldom
|
||||
import shlex
|
||||
import locale
|
||||
import codecs
|
||||
import unicodedata
|
||||
import warnings
|
||||
from setuptools.compat import unicode, PY2
|
||||
from setuptools.py31compat import TemporaryDirectory
|
||||
from xml.sax.saxutils import unescape
|
||||
|
||||
try:
|
||||
import urlparse
|
||||
except ImportError:
|
||||
import urllib.parse as urlparse
|
||||
|
||||
from subprocess import Popen as _Popen, PIPE as _PIPE
|
||||
|
||||
#NOTE: Use of the command line options require SVN 1.3 or newer (December 2005)
|
||||
# and SVN 1.3 hasn't been supported by the developers since mid 2008.
|
||||
|
||||
#subprocess is called several times with shell=(sys.platform=='win32')
|
||||
#see the follow for more information:
|
||||
# http://bugs.python.org/issue8557
|
||||
# http://stackoverflow.com/questions/5658622/
|
||||
# python-subprocess-popen-environment-path
|
||||
|
||||
def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0):
|
||||
#regarding the shell argument, see: http://bugs.python.org/issue8557
|
||||
try:
|
||||
proc = _Popen(args, stdout=stdout, stderr=stderr,
|
||||
shell=(sys.platform == 'win32'))
|
||||
|
||||
data = proc.communicate()[stream]
|
||||
except OSError:
|
||||
return 1, ''
|
||||
|
||||
#doubled checked and
|
||||
data = decode_as_string(data, encoding)
|
||||
|
||||
#communciate calls wait()
|
||||
return proc.returncode, data
|
||||
|
||||
|
||||
def _get_entry_schedule(entry):
|
||||
schedule = entry.getElementsByTagName('schedule')[0]
|
||||
return "".join([t.nodeValue
|
||||
for t in schedule.childNodes
|
||||
if t.nodeType == t.TEXT_NODE])
|
||||
|
||||
|
||||
def _get_target_property(target):
|
||||
property_text = target.getElementsByTagName('property')[0]
|
||||
return "".join([t.nodeValue
|
||||
for t in property_text.childNodes
|
||||
if t.nodeType == t.TEXT_NODE])
|
||||
|
||||
|
||||
def _get_xml_data(decoded_str):
|
||||
if PY2:
|
||||
#old versions want an encoded string
|
||||
data = decoded_str.encode('utf-8')
|
||||
else:
|
||||
data = decoded_str
|
||||
return data
|
||||
|
||||
|
||||
def joinpath(prefix, *suffix):
|
||||
if not prefix or prefix == '.':
|
||||
return os.path.join(*suffix)
|
||||
return os.path.join(prefix, *suffix)
|
||||
|
||||
def determine_console_encoding():
|
||||
try:
|
||||
#try for the preferred encoding
|
||||
encoding = locale.getpreferredencoding()
|
||||
|
||||
#see if the locale.getdefaultlocale returns null
|
||||
#some versions of python\platforms return US-ASCII
|
||||
#when it cannot determine an encoding
|
||||
if not encoding or encoding == "US-ASCII":
|
||||
encoding = locale.getdefaultlocale()[1]
|
||||
|
||||
if encoding:
|
||||
codecs.lookup(encoding) # make sure a lookup error is not made
|
||||
|
||||
except (locale.Error, LookupError):
|
||||
encoding = None
|
||||
|
||||
is_osx = sys.platform == "darwin"
|
||||
if not encoding:
|
||||
return ["US-ASCII", "utf-8"][is_osx]
|
||||
elif encoding.startswith("mac-") and is_osx:
|
||||
#certain versions of python would return mac-roman as default
|
||||
#OSX as a left over of earlier mac versions.
|
||||
return "utf-8"
|
||||
else:
|
||||
return encoding
|
||||
|
||||
_console_encoding = determine_console_encoding()
|
||||
|
||||
def decode_as_string(text, encoding=None):
|
||||
"""
|
||||
Decode the console or file output explicitly using getpreferredencoding.
|
||||
The text paraemeter should be a encoded string, if not no decode occurs
|
||||
If no encoding is given, getpreferredencoding is used. If encoding is
|
||||
specified, that is used instead. This would be needed for SVN --xml
|
||||
output. Unicode is explicitly put in composed NFC form.
|
||||
|
||||
--xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion
|
||||
DEV List from 2007 seems to indicate the same.
|
||||
"""
|
||||
#text should be a byte string
|
||||
|
||||
if encoding is None:
|
||||
encoding = _console_encoding
|
||||
|
||||
if not isinstance(text, unicode):
|
||||
text = text.decode(encoding)
|
||||
|
||||
text = unicodedata.normalize('NFC', text)
|
||||
|
||||
return text
|
||||
|
||||
|
||||
def parse_dir_entries(decoded_str):
|
||||
'''Parse the entries from a recursive info xml'''
|
||||
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
|
||||
entries = list()
|
||||
|
||||
for event, node in doc:
|
||||
if event == 'START_ELEMENT' and node.nodeName == 'entry':
|
||||
doc.expandNode(node)
|
||||
if not _get_entry_schedule(node).startswith('delete'):
|
||||
entries.append((node.getAttribute('path'),
|
||||
node.getAttribute('kind')))
|
||||
|
||||
return entries[1:] # do not want the root directory
|
||||
|
||||
|
||||
def parse_externals_xml(decoded_str, prefix=''):
|
||||
'''Parse a propget svn:externals xml'''
|
||||
prefix = os.path.normpath(prefix)
|
||||
prefix = os.path.normcase(prefix)
|
||||
|
||||
doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str))
|
||||
externals = list()
|
||||
|
||||
for event, node in doc:
|
||||
if event == 'START_ELEMENT' and node.nodeName == 'target':
|
||||
doc.expandNode(node)
|
||||
path = os.path.normpath(node.getAttribute('path'))
|
||||
|
||||
if os.path.normcase(path).startswith(prefix):
|
||||
path = path[len(prefix)+1:]
|
||||
|
||||
data = _get_target_property(node)
|
||||
#data should be decoded already
|
||||
for external in parse_external_prop(data):
|
||||
externals.append(joinpath(path, external))
|
||||
|
||||
return externals # do not want the root directory
|
||||
|
||||
|
||||
def parse_external_prop(lines):
|
||||
"""
|
||||
Parse the value of a retrieved svn:externals entry.
|
||||
|
||||
possible token setups (with quotng and backscaping in laters versions)
|
||||
URL[@#] EXT_FOLDERNAME
|
||||
[-r#] URL EXT_FOLDERNAME
|
||||
EXT_FOLDERNAME [-r#] URL
|
||||
"""
|
||||
externals = []
|
||||
for line in lines.splitlines():
|
||||
line = line.lstrip() # there might be a "\ "
|
||||
if not line:
|
||||
continue
|
||||
|
||||
if PY2:
|
||||
#shlex handles NULLs just fine and shlex in 2.7 tries to encode
|
||||
#as ascii automatiically
|
||||
line = line.encode('utf-8')
|
||||
line = shlex.split(line)
|
||||
if PY2:
|
||||
line = [x.decode('utf-8') for x in line]
|
||||
|
||||
#EXT_FOLDERNAME is either the first or last depending on where
|
||||
#the URL falls
|
||||
if urlparse.urlsplit(line[-1])[0]:
|
||||
external = line[0]
|
||||
else:
|
||||
external = line[-1]
|
||||
|
||||
external = decode_as_string(external, encoding="utf-8")
|
||||
externals.append(os.path.normpath(external))
|
||||
|
||||
return externals
|
||||
|
||||
|
||||
def parse_prop_file(filename, key):
|
||||
found = False
|
||||
f = open(filename, 'rt')
|
||||
data = ''
|
||||
try:
|
||||
for line in iter(f.readline, ''): # can't use direct iter!
|
||||
parts = line.split()
|
||||
if len(parts) == 2:
|
||||
kind, length = parts
|
||||
data = f.read(int(length))
|
||||
if kind == 'K' and data == key:
|
||||
found = True
|
||||
elif kind == 'V' and found:
|
||||
break
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class SvnInfo(object):
|
||||
'''
|
||||
Generic svn_info object. No has little knowledge of how to extract
|
||||
information. Use cls.load to instatiate according svn version.
|
||||
|
||||
Paths are not filesystem encoded.
|
||||
'''
|
||||
|
||||
@staticmethod
|
||||
def get_svn_version():
|
||||
# Temp config directory should be enough to check for repository
|
||||
# This is needed because .svn always creates .subversion and
|
||||
# some operating systems do not handle dot directory correctly.
|
||||
# Real queries in real svn repos with be concerned with it creation
|
||||
with TemporaryDirectory() as tempdir:
|
||||
code, data = _run_command(['svn',
|
||||
'--config-dir', tempdir,
|
||||
'--version',
|
||||
'--quiet'])
|
||||
|
||||
if code == 0 and data:
|
||||
return data.strip()
|
||||
else:
|
||||
return ''
|
||||
|
||||
#svnversion return values (previous implementations return max revision)
|
||||
# 4123:4168 mixed revision working copy
|
||||
# 4168M modified working copy
|
||||
# 4123S switched working copy
|
||||
# 4123:4168MS mixed revision, modified, switched working copy
|
||||
revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I)
|
||||
|
||||
@classmethod
|
||||
def load(cls, dirname=''):
|
||||
normdir = os.path.normpath(dirname)
|
||||
|
||||
# Temp config directory should be enough to check for repository
|
||||
# This is needed because .svn always creates .subversion and
|
||||
# some operating systems do not handle dot directory correctly.
|
||||
# Real queries in real svn repos with be concerned with it creation
|
||||
with TemporaryDirectory() as tempdir:
|
||||
code, data = _run_command(['svn',
|
||||
'--config-dir', tempdir,
|
||||
'info', normdir])
|
||||
|
||||
# Must check for some contents, as some use empty directories
|
||||
# in testcases, however only enteries is needed also the info
|
||||
# command above MUST have worked
|
||||
svn_dir = os.path.join(normdir, '.svn')
|
||||
is_svn_wd = (not code or
|
||||
os.path.isfile(os.path.join(svn_dir, 'entries')))
|
||||
|
||||
svn_version = tuple(cls.get_svn_version().split('.'))
|
||||
|
||||
try:
|
||||
base_svn_version = tuple(int(x) for x in svn_version[:2])
|
||||
except ValueError:
|
||||
base_svn_version = tuple()
|
||||
|
||||
if not is_svn_wd:
|
||||
#return an instance of this NO-OP class
|
||||
return SvnInfo(dirname)
|
||||
|
||||
if code or not base_svn_version or base_svn_version < (1, 3):
|
||||
warnings.warn(("No SVN 1.3+ command found: falling back "
|
||||
"on pre 1.7 .svn parsing"), DeprecationWarning)
|
||||
return SvnFileInfo(dirname)
|
||||
|
||||
if base_svn_version < (1, 5):
|
||||
return Svn13Info(dirname)
|
||||
|
||||
return Svn15Info(dirname)
|
||||
|
||||
def __init__(self, path=''):
|
||||
self.path = path
|
||||
self._entries = None
|
||||
self._externals = None
|
||||
|
||||
def get_revision(self):
|
||||
'Retrieve the directory revision informatino using svnversion'
|
||||
code, data = _run_command(['svnversion', '-c', self.path])
|
||||
if code:
|
||||
log.warn("svnversion failed")
|
||||
return 0
|
||||
|
||||
parsed = self.revision_re.match(data)
|
||||
if parsed:
|
||||
return int(parsed.group(2))
|
||||
else:
|
||||
return 0
|
||||
|
||||
@property
|
||||
def entries(self):
|
||||
if self._entries is None:
|
||||
self._entries = self.get_entries()
|
||||
return self._entries
|
||||
|
||||
@property
|
||||
def externals(self):
|
||||
if self._externals is None:
|
||||
self._externals = self.get_externals()
|
||||
return self._externals
|
||||
|
||||
def iter_externals(self):
|
||||
'''
|
||||
Iterate over the svn:external references in the repository path.
|
||||
'''
|
||||
for item in self.externals:
|
||||
yield item
|
||||
|
||||
def iter_files(self):
|
||||
'''
|
||||
Iterate over the non-deleted file entries in the repository path
|
||||
'''
|
||||
for item, kind in self.entries:
|
||||
if kind.lower() == 'file':
|
||||
yield item
|
||||
|
||||
def iter_dirs(self, include_root=True):
|
||||
'''
|
||||
Iterate over the non-deleted file entries in the repository path
|
||||
'''
|
||||
if include_root:
|
||||
yield self.path
|
||||
for item, kind in self.entries:
|
||||
if kind.lower() == 'dir':
|
||||
yield item
|
||||
|
||||
def get_entries(self):
|
||||
return []
|
||||
|
||||
def get_externals(self):
|
||||
return []
|
||||
|
||||
|
||||
class Svn13Info(SvnInfo):
|
||||
def get_entries(self):
|
||||
code, data = _run_command(['svn', 'info', '-R', '--xml', self.path],
|
||||
encoding="utf-8")
|
||||
|
||||
if code:
|
||||
log.debug("svn info failed")
|
||||
return []
|
||||
|
||||
return parse_dir_entries(data)
|
||||
|
||||
def get_externals(self):
|
||||
#Previous to 1.5 --xml was not supported for svn propget and the -R
|
||||
#output format breaks the shlex compatible semantics.
|
||||
cmd = ['svn', 'propget', 'svn:externals']
|
||||
result = []
|
||||
for folder in self.iter_dirs():
|
||||
code, lines = _run_command(cmd + [folder], encoding="utf-8")
|
||||
if code != 0:
|
||||
log.warn("svn propget failed")
|
||||
return []
|
||||
#lines should a str
|
||||
for external in parse_external_prop(lines):
|
||||
if folder:
|
||||
external = os.path.join(folder, external)
|
||||
result.append(os.path.normpath(external))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class Svn15Info(Svn13Info):
|
||||
def get_externals(self):
|
||||
cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml']
|
||||
code, lines = _run_command(cmd, encoding="utf-8")
|
||||
if code:
|
||||
log.debug("svn propget failed")
|
||||
return []
|
||||
return parse_externals_xml(lines, prefix=os.path.abspath(self.path))
|
||||
|
||||
|
||||
class SvnFileInfo(SvnInfo):
|
||||
|
||||
def __init__(self, path=''):
|
||||
super(SvnFileInfo, self).__init__(path)
|
||||
self._directories = None
|
||||
self._revision = None
|
||||
|
||||
def _walk_svn(self, base):
|
||||
entry_file = joinpath(base, '.svn', 'entries')
|
||||
if os.path.isfile(entry_file):
|
||||
entries = SVNEntriesFile.load(base)
|
||||
yield (base, False, entries.parse_revision())
|
||||
for path in entries.get_undeleted_records():
|
||||
path = decode_as_string(path)
|
||||
path = joinpath(base, path)
|
||||
if os.path.isfile(path):
|
||||
yield (path, True, None)
|
||||
elif os.path.isdir(path):
|
||||
for item in self._walk_svn(path):
|
||||
yield item
|
||||
|
||||
def _build_entries(self):
|
||||
entries = list()
|
||||
|
||||
rev = 0
|
||||
for path, isfile, dir_rev in self._walk_svn(self.path):
|
||||
if isfile:
|
||||
entries.append((path, 'file'))
|
||||
else:
|
||||
entries.append((path, 'dir'))
|
||||
rev = max(rev, dir_rev)
|
||||
|
||||
self._entries = entries
|
||||
self._revision = rev
|
||||
|
||||
def get_entries(self):
|
||||
if self._entries is None:
|
||||
self._build_entries()
|
||||
return self._entries
|
||||
|
||||
def get_revision(self):
|
||||
if self._revision is None:
|
||||
self._build_entries()
|
||||
return self._revision
|
||||
|
||||
def get_externals(self):
|
||||
prop_files = [['.svn', 'dir-prop-base'],
|
||||
['.svn', 'dir-props']]
|
||||
externals = []
|
||||
|
||||
for dirname in self.iter_dirs():
|
||||
prop_file = None
|
||||
for rel_parts in prop_files:
|
||||
filename = joinpath(dirname, *rel_parts)
|
||||
if os.path.isfile(filename):
|
||||
prop_file = filename
|
||||
|
||||
if prop_file is not None:
|
||||
ext_prop = parse_prop_file(prop_file, 'svn:externals')
|
||||
#ext_prop should be utf-8 coming from svn:externals
|
||||
ext_prop = decode_as_string(ext_prop, encoding="utf-8")
|
||||
externals.extend(parse_external_prop(ext_prop))
|
||||
|
||||
return externals
|
||||
|
||||
|
||||
def svn_finder(dirname=''):
|
||||
#combined externals due to common interface
|
||||
#combined externals and entries due to lack of dir_props in 1.7
|
||||
info = SvnInfo.load(dirname)
|
||||
for path in info.iter_files():
|
||||
yield path
|
||||
|
||||
for path in info.iter_externals():
|
||||
sub_info = SvnInfo.load(path)
|
||||
for sub_path in sub_info.iter_files():
|
||||
yield sub_path
|
||||
|
||||
|
||||
class SVNEntriesFile(object):
    """Parsed representation of an svn '.svn/entries' file.

    Use load()/read() rather than the constructor; they return the
    concrete text-format or XML-format subclass as appropriate.
    """

    def __init__(self, data):
        # Raw file contents; parsing happens lazily in subclasses.
        self.data = data

    @classmethod
    def load(class_, base):
        """Read the entries file under *base* and return a parsed instance."""
        filename = os.path.join(base, '.svn', 'entries')
        with open(filename) as f:
            return SVNEntriesFile.read(f)

    @classmethod
    def read(class_, fileobj):
        """Dispatch on the file contents to the XML or text subclass."""
        data = fileobj.read()
        concrete = SVNEntriesFileXML if data.startswith('<?xml') else SVNEntriesFileText
        return concrete(data)

    def parse_revision(self):
        """Return the highest recorded revision number (0 when none)."""
        return max(self.parse_revision_numbers() + [0])
|
||||
|
||||
|
||||
class SVNEntriesFileText(SVNEntriesFile):
    """Entries file in the plain-text format used by svn 1.4-1.6."""

    # Format number stored on the first line of the entries file,
    # keyed by the svn release series that wrote it.
    known_svn_versions = {
        '1.4.x': 8,
        '1.5.x': 9,
        '1.6.x': 10,
    }

    def __get_cached_sections(self):
        # Installed over get_sections() once parsing has succeeded.
        return self.sections

    def get_sections(self):
        """Split the raw data into per-entry sections.

        Returns the parsed (and cached) list of sections, or None when
        the leading format number cannot be parsed as an integer.
        """
        SECTION_DIVIDER = '\f\n'
        sections = self.data.split(SECTION_DIVIDER)
        sections = [x for x in map(str.splitlines, sections)]
        try:
            # remove the SVN version number from the first line
            svn_version = int(sections[0].pop(0))
            if svn_version not in self.known_svn_versions.values():
                # fixed typo: "verson" -> "version"
                log.warn("Unknown subversion version %d", svn_version)
        except ValueError:
            return
        self.sections = sections
        # Cache the result by shadowing this method on the instance.
        self.get_sections = self.__get_cached_sections
        return self.sections

    def is_valid(self):
        """True when the data parsed as a text-format entries file."""
        return bool(self.get_sections())

    def get_url(self):
        """Return the repository URL recorded in the first section."""
        return self.get_sections()[0][4]

    def parse_revision_numbers(self):
        """Return every per-entry committed revision number found."""
        # The committed revision lives on a fixed line of each section.
        revision_line_number = 9
        rev_numbers = [
            int(section[revision_line_number])
            for section in self.get_sections()
            if (len(section) > revision_line_number
                and section[revision_line_number])
        ]
        return rev_numbers

    def get_undeleted_records(self):
        """Return the names of entries not scheduled for deletion."""
        undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete')
        result = [
            section[0]
            for section in self.get_sections()
            if undeleted(section)
        ]
        return result
|
||||
|
||||
|
||||
class SVNEntriesFileXML(SVNEntriesFile):
    """Entries file stored in the XML format used by very old svn clients."""

    def is_valid(self):
        # XML entries data is always treated as parseable.
        return True

    def get_url(self):
        """Return the repository URL from the url attribute."""
        match = re.compile('url="([^"]+)"').search(self.data)
        return match.group(1)

    def parse_revision_numbers(self):
        """Return every committed revision number recorded in the data."""
        revre = re.compile(r'committed-rev="(\d+)"')
        return [int(found.group(1)) for found in revre.finditer(self.data)]

    def get_undeleted_records(self):
        """Return the unescaped names of entries not marked deleted."""
        entries_pattern = re.compile(
            r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
        return [
            unescape(hit.group(1))
            for hit in entries_pattern.finditer(self.data)
        ]
|
||||
|
||||
|
||||
# Script entry point: print every svn-tracked file (including externals)
# under the directory given as the first command-line argument.
if __name__ == '__main__':
    for name in svn_finder(sys.argv[1]):
        print(name)
|
||||
|
|
@ -1,351 +0,0 @@
|
|||
"""Tests for the 'setuptools' package"""
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
import doctest
|
||||
import distutils.core
|
||||
import distutils.cmd
|
||||
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils.core import Extension
|
||||
from distutils.version import LooseVersion
|
||||
from setuptools.compat import func_code
|
||||
|
||||
from setuptools.compat import func_code
|
||||
import setuptools.dist
|
||||
import setuptools.depends as dep
|
||||
from setuptools import Feature
|
||||
from setuptools.depends import Require
|
||||
|
||||
def additional_tests():
    """Build the doctest-based suite, plus Windows-only script tests."""
    members = [
        doctest.DocFileSuite(
            os.path.join('tests', 'api_tests.txt'),
            optionflags=doctest.ELLIPSIS, package='pkg_resources',
        ),
    ]
    suite = unittest.TestSuite(members)
    if sys.platform == 'win32':
        # The script-wrapper doctest only applies on Windows.
        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
    return suite
|
||||
|
||||
def makeSetup(**args):
    """Return distribution from 'setup(**args)', without executing commands"""
    # Tell distutils to stop right after command-line processing so no
    # build/install commands actually execute during the test.
    distutils.core._setup_stop_after = "commandline"

    # Don't let system command line leak into tests!
    args.setdefault('script_args',['install'])

    try:
        return setuptools.setup(**args)
    finally:
        # Always restore normal distutils behavior for later setups.
        distutils.core._setup_stop_after = None
|
||||
|
||||
|
||||
class DependsTests(unittest.TestCase):
|
||||
|
||||
def testExtractConst(self):
|
||||
if not hasattr(dep, 'extract_constant'):
|
||||
# skip on non-bytecode platforms
|
||||
return
|
||||
|
||||
def f1():
|
||||
global x, y, z
|
||||
x = "test"
|
||||
y = z
|
||||
|
||||
fc = func_code(f1)
|
||||
# unrecognized name
|
||||
self.assertEqual(dep.extract_constant(fc,'q', -1), None)
|
||||
|
||||
# constant assigned
|
||||
self.assertEqual(dep.extract_constant(fc,'x', -1), "test")
|
||||
|
||||
# expression assigned
|
||||
self.assertEqual(dep.extract_constant(fc,'y', -1), -1)
|
||||
|
||||
# recognized name, not assigned
|
||||
self.assertEqual(dep.extract_constant(fc,'z', -1), None)
|
||||
|
||||
def testFindModule(self):
|
||||
self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
|
||||
self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
|
||||
f,p,i = dep.find_module('setuptools.tests')
|
||||
f.close()
|
||||
|
||||
def testModuleExtract(self):
|
||||
if not hasattr(dep, 'get_module_constant'):
|
||||
# skip on non-bytecode platforms
|
||||
return
|
||||
|
||||
from email import __version__
|
||||
self.assertEqual(
|
||||
dep.get_module_constant('email','__version__'), __version__
|
||||
)
|
||||
self.assertEqual(
|
||||
dep.get_module_constant('sys','version'), sys.version
|
||||
)
|
||||
self.assertEqual(
|
||||
dep.get_module_constant('setuptools.tests','__doc__'),__doc__
|
||||
)
|
||||
|
||||
def testRequire(self):
|
||||
if not hasattr(dep, 'extract_constant'):
|
||||
# skip on non-bytecode platformsh
|
||||
return
|
||||
|
||||
req = Require('Email','1.0.3','email')
|
||||
|
||||
self.assertEqual(req.name, 'Email')
|
||||
self.assertEqual(req.module, 'email')
|
||||
self.assertEqual(req.requested_version, '1.0.3')
|
||||
self.assertEqual(req.attribute, '__version__')
|
||||
self.assertEqual(req.full_name(), 'Email-1.0.3')
|
||||
|
||||
from email import __version__
|
||||
self.assertEqual(req.get_version(), __version__)
|
||||
self.assertTrue(req.version_ok('1.0.9'))
|
||||
self.assertTrue(not req.version_ok('0.9.1'))
|
||||
self.assertTrue(not req.version_ok('unknown'))
|
||||
|
||||
self.assertTrue(req.is_present())
|
||||
self.assertTrue(req.is_current())
|
||||
|
||||
req = Require('Email 3000','03000','email',format=LooseVersion)
|
||||
self.assertTrue(req.is_present())
|
||||
self.assertTrue(not req.is_current())
|
||||
self.assertTrue(not req.version_ok('unknown'))
|
||||
|
||||
req = Require('Do-what-I-mean','1.0','d-w-i-m')
|
||||
self.assertTrue(not req.is_present())
|
||||
self.assertTrue(not req.is_current())
|
||||
|
||||
req = Require('Tests', None, 'tests', homepage="http://example.com")
|
||||
self.assertEqual(req.format, None)
|
||||
self.assertEqual(req.attribute, None)
|
||||
self.assertEqual(req.requested_version, None)
|
||||
self.assertEqual(req.full_name(), 'Tests')
|
||||
self.assertEqual(req.homepage, 'http://example.com')
|
||||
|
||||
paths = [os.path.dirname(p) for p in __path__]
|
||||
self.assertTrue(req.is_present(paths))
|
||||
self.assertTrue(req.is_current(paths))
|
||||
|
||||
|
||||
class DistroTests(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.e1 = Extension('bar.ext',['bar.c'])
|
||||
self.e2 = Extension('c.y', ['y.c'])
|
||||
|
||||
self.dist = makeSetup(
|
||||
packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
|
||||
py_modules=['b.d','x'],
|
||||
ext_modules = (self.e1, self.e2),
|
||||
package_dir = {},
|
||||
)
|
||||
|
||||
def testDistroType(self):
|
||||
self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
|
||||
|
||||
def testExcludePackage(self):
|
||||
self.dist.exclude_package('a')
|
||||
self.assertEqual(self.dist.packages, ['b','c'])
|
||||
|
||||
self.dist.exclude_package('b')
|
||||
self.assertEqual(self.dist.packages, ['c'])
|
||||
self.assertEqual(self.dist.py_modules, ['x'])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
|
||||
|
||||
self.dist.exclude_package('c')
|
||||
self.assertEqual(self.dist.packages, [])
|
||||
self.assertEqual(self.dist.py_modules, ['x'])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e1])
|
||||
|
||||
# test removals from unspecified options
|
||||
makeSetup().exclude_package('x')
|
||||
|
||||
def testIncludeExclude(self):
|
||||
# remove an extension
|
||||
self.dist.exclude(ext_modules=[self.e1])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e2])
|
||||
|
||||
# add it back in
|
||||
self.dist.include(ext_modules=[self.e1])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
|
||||
|
||||
# should not add duplicate
|
||||
self.dist.include(ext_modules=[self.e1])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
|
||||
|
||||
def testExcludePackages(self):
|
||||
self.dist.exclude(packages=['c','b','a'])
|
||||
self.assertEqual(self.dist.packages, [])
|
||||
self.assertEqual(self.dist.py_modules, ['x'])
|
||||
self.assertEqual(self.dist.ext_modules, [self.e1])
|
||||
|
||||
def testEmpty(self):
|
||||
dist = makeSetup()
|
||||
dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
|
||||
dist = makeSetup()
|
||||
dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
|
||||
|
||||
def testContents(self):
|
||||
self.assertTrue(self.dist.has_contents_for('a'))
|
||||
self.dist.exclude_package('a')
|
||||
self.assertTrue(not self.dist.has_contents_for('a'))
|
||||
|
||||
self.assertTrue(self.dist.has_contents_for('b'))
|
||||
self.dist.exclude_package('b')
|
||||
self.assertTrue(not self.dist.has_contents_for('b'))
|
||||
|
||||
self.assertTrue(self.dist.has_contents_for('c'))
|
||||
self.dist.exclude_package('c')
|
||||
self.assertTrue(not self.dist.has_contents_for('c'))
|
||||
|
||||
def testInvalidIncludeExclude(self):
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.include, nonexistent_option='x'
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.exclude, nonexistent_option='x'
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.include, packages={'x':'y'}
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.exclude, packages={'x':'y'}
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.include, ext_modules={'x':'y'}
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.exclude, ext_modules={'x':'y'}
|
||||
)
|
||||
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.include, package_dir=['q']
|
||||
)
|
||||
self.assertRaises(DistutilsSetupError,
|
||||
self.dist.exclude, package_dir=['q']
|
||||
)
|
||||
|
||||
|
||||
class FeatureTests(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.req = Require('Distutils','1.0.3','distutils')
|
||||
self.dist = makeSetup(
|
||||
features={
|
||||
'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
|
||||
'bar': Feature("bar", standard=True, packages=['pkg.bar'],
|
||||
py_modules=['bar_et'], remove=['bar.ext'],
|
||||
),
|
||||
'baz': Feature(
|
||||
"baz", optional=False, packages=['pkg.baz'],
|
||||
scripts = ['scripts/baz_it'],
|
||||
libraries=[('libfoo','foo/foofoo.c')]
|
||||
),
|
||||
'dwim': Feature("DWIM", available=False, remove='bazish'),
|
||||
},
|
||||
script_args=['--without-bar', 'install'],
|
||||
packages = ['pkg.bar', 'pkg.foo'],
|
||||
py_modules = ['bar_et', 'bazish'],
|
||||
ext_modules = [Extension('bar.ext',['bar.c'])]
|
||||
)
|
||||
|
||||
def testDefaults(self):
|
||||
self.assertTrue(not
|
||||
Feature(
|
||||
"test",standard=True,remove='x',available=False
|
||||
).include_by_default()
|
||||
)
|
||||
self.assertTrue(
|
||||
Feature("test",standard=True,remove='x').include_by_default()
|
||||
)
|
||||
# Feature must have either kwargs, removes, or require_features
|
||||
self.assertRaises(DistutilsSetupError, Feature, "test")
|
||||
|
||||
def testAvailability(self):
|
||||
self.assertRaises(
|
||||
DistutilsPlatformError,
|
||||
self.dist.features['dwim'].include_in, self.dist
|
||||
)
|
||||
|
||||
def testFeatureOptions(self):
|
||||
dist = self.dist
|
||||
self.assertTrue(
|
||||
('with-dwim',None,'include DWIM') in dist.feature_options
|
||||
)
|
||||
self.assertTrue(
|
||||
('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
|
||||
)
|
||||
self.assertTrue(
|
||||
('with-bar',None,'include bar (default)') in dist.feature_options
|
||||
)
|
||||
self.assertTrue(
|
||||
('without-bar',None,'exclude bar') in dist.feature_options
|
||||
)
|
||||
self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
|
||||
self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
|
||||
self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
|
||||
self.assertTrue(not 'without-baz' in dist.feature_negopt)
|
||||
|
||||
def testUseFeatures(self):
|
||||
dist = self.dist
|
||||
self.assertEqual(dist.with_foo,1)
|
||||
self.assertEqual(dist.with_bar,0)
|
||||
self.assertEqual(dist.with_baz,1)
|
||||
self.assertTrue(not 'bar_et' in dist.py_modules)
|
||||
self.assertTrue(not 'pkg.bar' in dist.packages)
|
||||
self.assertTrue('pkg.baz' in dist.packages)
|
||||
self.assertTrue('scripts/baz_it' in dist.scripts)
|
||||
self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
|
||||
self.assertEqual(dist.ext_modules,[])
|
||||
self.assertEqual(dist.require_features, [self.req])
|
||||
|
||||
# If we ask for bar, it should fail because we explicitly disabled
|
||||
# it on the command line
|
||||
self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
|
||||
|
||||
def testFeatureWithInvalidRemove(self):
|
||||
self.assertRaises(
|
||||
SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
|
||||
)
|
||||
|
||||
class TestCommandTests(unittest.TestCase):
|
||||
|
||||
def testTestIsCommand(self):
|
||||
test_cmd = makeSetup().get_command_obj('test')
|
||||
self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
|
||||
|
||||
def testLongOptSuiteWNoDefault(self):
|
||||
ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
|
||||
ts1 = ts1.get_command_obj('test')
|
||||
ts1.ensure_finalized()
|
||||
self.assertEqual(ts1.test_suite, 'foo.tests.suite')
|
||||
|
||||
def testDefaultSuite(self):
|
||||
ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
|
||||
ts2.ensure_finalized()
|
||||
self.assertEqual(ts2.test_suite, 'bar.tests.suite')
|
||||
|
||||
def testDefaultWModuleOnCmdLine(self):
|
||||
ts3 = makeSetup(
|
||||
test_suite='bar.tests',
|
||||
script_args=['test','-m','foo.tests']
|
||||
).get_command_obj('test')
|
||||
ts3.ensure_finalized()
|
||||
self.assertEqual(ts3.test_module, 'foo.tests')
|
||||
self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
|
||||
|
||||
def testConflictingOptions(self):
|
||||
ts4 = makeSetup(
|
||||
script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
|
||||
).get_command_obj('test')
|
||||
self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
|
||||
|
||||
def testNoSuite(self):
|
||||
ts5 = makeSetup().get_command_obj('test')
|
||||
ts5.ensure_finalized()
|
||||
self.assertEqual(ts5.test_suite, None)
|
||||
|
|
@ -1,165 +0,0 @@
|
|||
import os
|
||||
import zipfile
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
import shutil
|
||||
import stat
|
||||
import unicodedata
|
||||
|
||||
from subprocess import Popen as _Popen, PIPE as _PIPE
|
||||
|
||||
|
||||
def _extract(self, member, path=None, pwd=None):
    """Backport of ZipFile.extract for Python 2.5 (borrowed from CPython)."""
    if path is None:
        path = os.getcwd()

    # Accept either a member name or a ZipInfo object.
    if not isinstance(member, zipfile.ZipInfo):
        member = self.getinfo(member)

    return _extract_member(self, member, path, pwd)
|
||||
|
||||
|
||||
def _extract_from_zip(self, name, dest_path):
    """Write archive member *name* out of the zip to *dest_path*."""
    with open(dest_path, 'wb') as dest_file:
        dest_file.write(self.read(name))
|
||||
|
||||
|
||||
def _extract_member(self, member, targetpath, pwd):
    """for zipfile py2.5 borrowed from cpython"""
    # build the destination pathname, replacing
    # forward slashes to platform specific separators.
    # Strip trailing path separator, unless it represents the root.
    if (targetpath[-1:] in (os.path.sep, os.path.altsep)
        and len(os.path.splitdrive(targetpath)[1]) > 1):
        targetpath = targetpath[:-1]

    # don't include leading "/" from file name if present
    if member.filename[0] == '/':
        targetpath = os.path.join(targetpath, member.filename[1:])
    else:
        targetpath = os.path.join(targetpath, member.filename)

    targetpath = os.path.normpath(targetpath)

    # Create all upper directories if necessary.
    upperdirs = os.path.dirname(targetpath)
    if upperdirs and not os.path.exists(upperdirs):
        os.makedirs(upperdirs)

    # A trailing slash marks a directory member: create it (if missing)
    # and return without writing any file data.
    if member.filename[-1] == '/':
        if not os.path.isdir(targetpath):
            os.mkdir(targetpath)
        return targetpath

    _extract_from_zip(self, member.filename, targetpath)

    return targetpath
|
||||
|
||||
|
||||
def _remove_dir(target):
    """Recursively delete *target*, clearing read-only bits first.

    shutil.rmtree chokes on read-only entries (notably on Windows),
    so grant write permission to every directory and file beforehand.
    """
    for dir_path, _subdirs, filenames in os.walk(target):
        os.chmod(dir_path, stat.S_IWRITE)
        for name in filenames:
            os.chmod(os.path.join(dir_path, name), stat.S_IWRITE)
    shutil.rmtree(target)
|
||||
|
||||
|
||||
class ZippedEnvironment(unittest.TestCase):
|
||||
|
||||
datafile = None
|
||||
dataname = None
|
||||
old_cwd = None
|
||||
|
||||
def setUp(self):
|
||||
if self.datafile is None or self.dataname is None:
|
||||
return
|
||||
|
||||
if not os.path.isfile(self.datafile):
|
||||
self.old_cwd = None
|
||||
return
|
||||
|
||||
self.old_cwd = os.getcwd()
|
||||
|
||||
self.temp_dir = tempfile.mkdtemp()
|
||||
zip_file, source, target = [None, None, None]
|
||||
try:
|
||||
zip_file = zipfile.ZipFile(self.datafile)
|
||||
for files in zip_file.namelist():
|
||||
_extract(zip_file, files, self.temp_dir)
|
||||
finally:
|
||||
if zip_file:
|
||||
zip_file.close()
|
||||
del zip_file
|
||||
|
||||
os.chdir(os.path.join(self.temp_dir, self.dataname))
|
||||
|
||||
def tearDown(self):
|
||||
#Assume setUp was never completed
|
||||
if self.dataname is None or self.datafile is None:
|
||||
return
|
||||
|
||||
try:
|
||||
if self.old_cwd:
|
||||
os.chdir(self.old_cwd)
|
||||
_remove_dir(self.temp_dir)
|
||||
except OSError:
|
||||
#sigh?
|
||||
pass
|
||||
|
||||
|
||||
def _which_dirs(cmd):
    """Return the set of PATH directories containing an executable *cmd*."""
    search_dirs = os.environ.get('PATH', '').split(os.pathsep)
    return set(
        directory
        for directory in search_dirs
        if os.access(os.path.join(directory, cmd), os.X_OK)
    )
|
||||
|
||||
|
||||
def run_setup_py(cmd, pypath=None, path=None,
                 data_stream=0, env=None):
    """
    Execution command for tests, separate from those used by the
    code directly to prevent accidental behavior issues

    cmd: extra arguments appended after 'setup.py'.
    pypath/path: optional overrides for PYTHONPATH / PATH.
    data_stream: 0 for stdout, 1 for stderr.
    Returns (returncode, decoded_output); (1, '') if the process
    could not be started.
    """
    # Start from a copy of the real environment unless one was supplied.
    if env is None:
        env = dict()
        for envname in os.environ:
            env[envname] = os.environ[envname]

    #override the python path if needed
    if pypath is not None:
        env["PYTHONPATH"] = pypath

    #override the execution path if needed
    if path is not None:
        env["PATH"] = path
    if not env.get("PATH", ""):
        # Fall back to just the directories containing tar/gzip so
        # archive-related commands still work with an empty PATH.
        env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
        env["PATH"] = os.pathsep.join(env["PATH"])

    cmd = [sys.executable, "setup.py"] + list(cmd)

    #regarding the shell argument, see: http://bugs.python.org/issue8557
    try:
        proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE,
                      shell=(sys.platform == 'win32'), env=env)

        data = proc.communicate()[data_stream]
    except OSError:
        return 1, ''

    #decode the console string if needed
    if hasattr(data, "decode"):
        data = data.decode() # should use the preferred encoding
        data = unicodedata.normalize('NFC', data)

    #communicate calls wait()
    return proc.returncode, data
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
import unittest
|
||||
|
||||
try:
    # provide skipIf for Python 2.4-2.6
    skipIf = unittest.skipIf
except AttributeError:
    def skipIf(condition, reason):
        """Minimal stand-in for unittest.skipIf on older Pythons."""
        def skipper(func):
            def skip(*args, **kwargs):
                return
            # Replace the test with a no-op when the condition holds.
            return skip if condition else func
        return skipper
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
result = 'passed'
|
||||
|
|
@ -1,82 +0,0 @@
|
|||
"""Basic http server for tests to simulate PyPI or custom indexes
|
||||
"""
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
from setuptools.compat import BaseHTTPRequestHandler
|
||||
from setuptools.compat import (urllib2, URLError, HTTPServer,
|
||||
SimpleHTTPRequestHandler)
|
||||
|
||||
class IndexServer(HTTPServer):
|
||||
"""Basic single-threaded http server simulating a package index
|
||||
|
||||
You can use this server in unittest like this::
|
||||
s = IndexServer()
|
||||
s.start()
|
||||
index_url = s.base_url() + 'mytestindex'
|
||||
# do some test requests to the index
|
||||
# The index files should be located in setuptools/tests/indexes
|
||||
s.stop()
|
||||
"""
|
||||
def __init__(self, server_address=('', 0),
|
||||
RequestHandlerClass=SimpleHTTPRequestHandler):
|
||||
HTTPServer.__init__(self, server_address, RequestHandlerClass)
|
||||
self._run = True
|
||||
|
||||
def serve(self):
|
||||
while self._run:
|
||||
self.handle_request()
|
||||
|
||||
def start(self):
|
||||
self.thread = threading.Thread(target=self.serve)
|
||||
self.thread.start()
|
||||
|
||||
def stop(self):
|
||||
"Stop the server"
|
||||
|
||||
# Let the server finish the last request and wait for a new one.
|
||||
time.sleep(0.1)
|
||||
|
||||
# self.shutdown is not supported on python < 2.6, so just
|
||||
# set _run to false, and make a request, causing it to
|
||||
# terminate.
|
||||
self._run = False
|
||||
url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
|
||||
try:
|
||||
if sys.version_info >= (2, 6):
|
||||
urllib2.urlopen(url, timeout=5)
|
||||
else:
|
||||
urllib2.urlopen(url)
|
||||
except URLError:
|
||||
# ignore any errors; all that's important is the request
|
||||
pass
|
||||
self.thread.join()
|
||||
self.socket.close()
|
||||
|
||||
def base_url(self):
|
||||
port = self.server_port
|
||||
return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
|
||||
|
||||
class RequestRecorder(BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
requests = vars(self.server).setdefault('requests', [])
|
||||
requests.append(self)
|
||||
self.send_response(200, 'OK')
|
||||
|
||||
class MockServer(HTTPServer, threading.Thread):
|
||||
"""
|
||||
A simple HTTP Server that records the requests made to it.
|
||||
"""
|
||||
def __init__(self, server_address=('', 0),
|
||||
RequestHandlerClass=RequestRecorder):
|
||||
HTTPServer.__init__(self, server_address, RequestHandlerClass)
|
||||
threading.Thread.__init__(self)
|
||||
self.setDaemon(True)
|
||||
self.requests = []
|
||||
|
||||
def run(self):
|
||||
self.serve_forever()
|
||||
|
||||
def url(self):
|
||||
return 'http://localhost:%(server_port)s/' % vars(self)
|
||||
url = property(url)
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
"""develop tests
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import site
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
from setuptools.compat import StringIO
|
||||
from setuptools.command.bdist_egg import bdist_egg
|
||||
from setuptools.command import easy_install as easy_install_pkg
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
SETUP_PY = """\
|
||||
from setuptools import setup
|
||||
|
||||
setup(name='foo', py_modules=['hi'])
|
||||
"""
|
||||
|
||||
class TestDevelopTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dir = tempfile.mkdtemp()
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.dir)
|
||||
f = open('setup.py', 'w')
|
||||
f.write(SETUP_PY)
|
||||
f.close()
|
||||
f = open('hi.py', 'w')
|
||||
f.write('1\n')
|
||||
f.close()
|
||||
if sys.version >= "2.6":
|
||||
self.old_base = site.USER_BASE
|
||||
site.USER_BASE = tempfile.mkdtemp()
|
||||
self.old_site = site.USER_SITE
|
||||
site.USER_SITE = tempfile.mkdtemp()
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
shutil.rmtree(self.dir)
|
||||
if sys.version >= "2.6":
|
||||
shutil.rmtree(site.USER_BASE)
|
||||
shutil.rmtree(site.USER_SITE)
|
||||
site.USER_BASE = self.old_base
|
||||
site.USER_SITE = self.old_site
|
||||
|
||||
def test_bdist_egg(self):
|
||||
dist = Distribution(dict(
|
||||
script_name='setup.py',
|
||||
script_args=['bdist_egg'],
|
||||
name='foo',
|
||||
py_modules=['hi']
|
||||
))
|
||||
os.makedirs(os.path.join('build', 'src'))
|
||||
old_stdout = sys.stdout
|
||||
sys.stdout = o = StringIO()
|
||||
try:
|
||||
dist.parse_command_line()
|
||||
dist.run_commands()
|
||||
finally:
|
||||
sys.stdout = old_stdout
|
||||
|
||||
# let's see if we got our egg link at the right place
|
||||
[content] = os.listdir('dist')
|
||||
self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content))
|
||||
|
||||
def test_suite():
|
||||
return unittest.makeSuite(TestDevelopTest)
|
||||
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
"""build_ext tests
|
||||
"""
|
||||
import unittest
|
||||
from distutils.command.build_ext import build_ext as distutils_build_ext
|
||||
from setuptools.command.build_ext import build_ext
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
class TestBuildExtTest(unittest.TestCase):
|
||||
|
||||
def test_get_ext_filename(self):
|
||||
# setuptools needs to give back the same
|
||||
# result than distutils, even if the fullname
|
||||
# is not in ext_map
|
||||
dist = Distribution()
|
||||
cmd = build_ext(dist)
|
||||
cmd.ext_map['foo/bar'] = ''
|
||||
res = cmd.get_ext_filename('foo')
|
||||
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
|
||||
assert res == wanted
|
||||
|
|
@ -1,122 +0,0 @@
|
|||
"""develop tests
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import site
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
from setuptools.command.develop import develop
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
SETUP_PY = """\
|
||||
from setuptools import setup
|
||||
|
||||
setup(name='foo',
|
||||
packages=['foo'],
|
||||
use_2to3=True,
|
||||
)
|
||||
"""
|
||||
|
||||
INIT_PY = """print "foo"
|
||||
"""
|
||||
|
||||
class TestDevelopTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
|
||||
return
|
||||
|
||||
# Directory structure
|
||||
self.dir = tempfile.mkdtemp()
|
||||
os.mkdir(os.path.join(self.dir, 'foo'))
|
||||
# setup.py
|
||||
setup = os.path.join(self.dir, 'setup.py')
|
||||
f = open(setup, 'w')
|
||||
f.write(SETUP_PY)
|
||||
f.close()
|
||||
self.old_cwd = os.getcwd()
|
||||
# foo/__init__.py
|
||||
init = os.path.join(self.dir, 'foo', '__init__.py')
|
||||
f = open(init, 'w')
|
||||
f.write(INIT_PY)
|
||||
f.close()
|
||||
|
||||
os.chdir(self.dir)
|
||||
self.old_base = site.USER_BASE
|
||||
site.USER_BASE = tempfile.mkdtemp()
|
||||
self.old_site = site.USER_SITE
|
||||
site.USER_SITE = tempfile.mkdtemp()
|
||||
|
||||
def tearDown(self):
|
||||
if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
|
||||
return
|
||||
|
||||
os.chdir(self.old_cwd)
|
||||
shutil.rmtree(self.dir)
|
||||
shutil.rmtree(site.USER_BASE)
|
||||
shutil.rmtree(site.USER_SITE)
|
||||
site.USER_BASE = self.old_base
|
||||
site.USER_SITE = self.old_site
|
||||
|
||||
def test_develop(self):
|
||||
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
|
||||
return
|
||||
dist = Distribution(
|
||||
dict(name='foo',
|
||||
packages=['foo'],
|
||||
use_2to3=True,
|
||||
version='0.0',
|
||||
))
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = develop(dist)
|
||||
cmd.user = 1
|
||||
cmd.ensure_finalized()
|
||||
cmd.install_dir = site.USER_SITE
|
||||
cmd.user = 1
|
||||
old_stdout = sys.stdout
|
||||
#sys.stdout = StringIO()
|
||||
try:
|
||||
cmd.run()
|
||||
finally:
|
||||
sys.stdout = old_stdout
|
||||
|
||||
# let's see if we got our egg link at the right place
|
||||
content = os.listdir(site.USER_SITE)
|
||||
content.sort()
|
||||
self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
|
||||
|
||||
# Check that we are using the right code.
|
||||
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
|
||||
try:
|
||||
path = egg_link_file.read().split()[0].strip()
|
||||
finally:
|
||||
egg_link_file.close()
|
||||
init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
|
||||
try:
|
||||
init = init_file.read().strip()
|
||||
finally:
|
||||
init_file.close()
|
||||
if sys.version < "3":
|
||||
self.assertEqual(init, 'print "foo"')
|
||||
else:
|
||||
self.assertEqual(init, 'print("foo")')
|
||||
|
||||
def notest_develop_with_setup_requires(self):
|
||||
|
||||
wanted = ("Could not find suitable distribution for "
|
||||
"Requirement.parse('I-DONT-EXIST')")
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(self.dir)
|
||||
try:
|
||||
try:
|
||||
Distribution({'setup_requires': ['I_DONT_EXIST']})
|
||||
except DistutilsError:
|
||||
e = sys.exc_info()[1]
|
||||
error = str(e)
|
||||
if error == wanted:
|
||||
pass
|
||||
finally:
|
||||
os.chdir(old_dir)
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
"""Test .dist-info style distributions.
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import textwrap
|
||||
|
||||
try:
|
||||
import ast
|
||||
except:
|
||||
pass
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
def DALS(s):
    """Dedent *s* and strip its leading whitespace."""
    dedented = textwrap.dedent(s)
    return dedented.lstrip()
|
||||
|
||||
class TestDistInfo(unittest.TestCase):
|
||||
|
||||
def test_distinfo(self):
|
||||
dists = {}
|
||||
for d in pkg_resources.find_distributions(self.tmpdir):
|
||||
dists[d.project_name] = d
|
||||
|
||||
assert len(dists) == 2, dists
|
||||
|
||||
unversioned = dists['UnversionedDistribution']
|
||||
versioned = dists['VersionedDistribution']
|
||||
|
||||
assert versioned.version == '2.718' # from filename
|
||||
assert unversioned.version == '0.3' # from METADATA
|
||||
|
||||
@skipIf('ast' not in globals(),
|
||||
"ast is used to test conditional dependencies (Python >= 2.6)")
|
||||
def test_conditional_dependencies(self):
|
||||
requires = [pkg_resources.Requirement.parse('splort==4'),
|
||||
pkg_resources.Requirement.parse('quux>=1.1')]
|
||||
|
||||
for d in pkg_resources.find_distributions(self.tmpdir):
|
||||
self.assertEqual(d.requires(), requires[:1])
|
||||
self.assertEqual(d.requires(extras=('baz',)), requires)
|
||||
self.assertEqual(d.extras, ['baz'])
|
||||
|
||||
def setUp(self):
|
||||
self.tmpdir = tempfile.mkdtemp()
|
||||
versioned = os.path.join(self.tmpdir,
|
||||
'VersionedDistribution-2.718.dist-info')
|
||||
os.mkdir(versioned)
|
||||
metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
|
||||
try:
|
||||
metadata_file.write(DALS(
|
||||
"""
|
||||
Metadata-Version: 1.2
|
||||
Name: VersionedDistribution
|
||||
Requires-Dist: splort (4)
|
||||
Provides-Extra: baz
|
||||
Requires-Dist: quux (>=1.1); extra == 'baz'
|
||||
"""))
|
||||
finally:
|
||||
metadata_file.close()
|
||||
unversioned = os.path.join(self.tmpdir,
|
||||
'UnversionedDistribution.dist-info')
|
||||
os.mkdir(unversioned)
|
||||
metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
|
||||
try:
|
||||
metadata_file.write(DALS(
|
||||
"""
|
||||
Metadata-Version: 1.2
|
||||
Name: UnversionedDistribution
|
||||
Version: 0.3
|
||||
Requires-Dist: splort (==4)
|
||||
Provides-Extra: baz
|
||||
Requires-Dist: quux (>=1.1); extra == 'baz'
|
||||
"""))
|
||||
finally:
|
||||
metadata_file.close()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tmpdir)
|
||||
|
|
@ -1,462 +0,0 @@
|
|||
"""Easy install Tests
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import site
|
||||
import contextlib
|
||||
import textwrap
|
||||
import tarfile
|
||||
import logging
|
||||
import distutils.core
|
||||
|
||||
from setuptools.compat import StringIO, BytesIO, urlparse
|
||||
from setuptools.sandbox import run_setup, SandboxViolation
|
||||
from setuptools.command.easy_install import (
|
||||
easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
|
||||
from setuptools.command.easy_install import PthDistributions
|
||||
from setuptools.command import easy_install as easy_install_pkg
|
||||
from setuptools.dist import Distribution
|
||||
from pkg_resources import working_set, VersionConflict
|
||||
from pkg_resources import Distribution as PRDistribution
|
||||
import setuptools.tests.server
|
||||
import pkg_resources
|
||||
from .py26compat import skipIf
|
||||
|
||||
class FakeDist(object):
|
||||
def get_entry_map(self, group):
|
||||
if group != 'console_scripts':
|
||||
return {}
|
||||
return {'name': 'ep'}
|
||||
|
||||
def as_requirement(self):
|
||||
return 'spec'
|
||||
|
||||
WANTED = """\
|
||||
#!%s
|
||||
# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
|
||||
__requires__ = 'spec'
|
||||
import sys
|
||||
from pkg_resources import load_entry_point
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(
|
||||
load_entry_point('spec', 'console_scripts', 'name')()
|
||||
)
|
||||
""" % nt_quote_arg(fix_jython_executable(sys.executable, ""))
|
||||
|
||||
SETUP_PY = """\
|
||||
from setuptools import setup
|
||||
|
||||
setup(name='foo')
|
||||
"""
|
||||
|
||||
class TestEasyInstallTest(unittest.TestCase):
|
||||
|
||||
def test_install_site_py(self):
|
||||
dist = Distribution()
|
||||
cmd = easy_install(dist)
|
||||
cmd.sitepy_installed = False
|
||||
cmd.install_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
cmd.install_site_py()
|
||||
sitepy = os.path.join(cmd.install_dir, 'site.py')
|
||||
self.assertTrue(os.path.exists(sitepy))
|
||||
finally:
|
||||
shutil.rmtree(cmd.install_dir)
|
||||
|
||||
def test_get_script_args(self):
|
||||
dist = FakeDist()
|
||||
|
||||
old_platform = sys.platform
|
||||
try:
|
||||
name, script = [i for i in next(get_script_args(dist))][0:2]
|
||||
finally:
|
||||
sys.platform = old_platform
|
||||
|
||||
self.assertEqual(script, WANTED)
|
||||
|
||||
def test_no_find_links(self):
|
||||
# new option '--no-find-links', that blocks find-links added at
|
||||
# the project level
|
||||
dist = Distribution()
|
||||
cmd = easy_install(dist)
|
||||
cmd.check_pth_processing = lambda: True
|
||||
cmd.no_find_links = True
|
||||
cmd.find_links = ['link1', 'link2']
|
||||
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
|
||||
cmd.args = ['ok']
|
||||
cmd.ensure_finalized()
|
||||
self.assertEqual(cmd.package_index.scanned_urls, {})
|
||||
|
||||
# let's try without it (default behavior)
|
||||
cmd = easy_install(dist)
|
||||
cmd.check_pth_processing = lambda: True
|
||||
cmd.find_links = ['link1', 'link2']
|
||||
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
|
||||
cmd.args = ['ok']
|
||||
cmd.ensure_finalized()
|
||||
keys = sorted(cmd.package_index.scanned_urls.keys())
|
||||
self.assertEqual(keys, ['link1', 'link2'])
|
||||
|
||||
|
||||
class TestPTHFileWriter(unittest.TestCase):
|
||||
def test_add_from_cwd_site_sets_dirty(self):
|
||||
'''a pth file manager should set dirty
|
||||
if a distribution is in site but also the cwd
|
||||
'''
|
||||
pth = PthDistributions('does-not_exist', [os.getcwd()])
|
||||
self.assertTrue(not pth.dirty)
|
||||
pth.add(PRDistribution(os.getcwd()))
|
||||
self.assertTrue(pth.dirty)
|
||||
|
||||
def test_add_from_site_is_ignored(self):
|
||||
if os.name != 'nt':
|
||||
location = '/test/location/does-not-have-to-exist'
|
||||
else:
|
||||
location = 'c:\\does_not_exist'
|
||||
pth = PthDistributions('does-not_exist', [location, ])
|
||||
self.assertTrue(not pth.dirty)
|
||||
pth.add(PRDistribution(location))
|
||||
self.assertTrue(not pth.dirty)
|
||||
|
||||
|
||||
class TestUserInstallTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dir = tempfile.mkdtemp()
|
||||
setup = os.path.join(self.dir, 'setup.py')
|
||||
f = open(setup, 'w')
|
||||
f.write(SETUP_PY)
|
||||
f.close()
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.dir)
|
||||
|
||||
self.old_enable_site = site.ENABLE_USER_SITE
|
||||
self.old_file = easy_install_pkg.__file__
|
||||
self.old_base = site.USER_BASE
|
||||
site.USER_BASE = tempfile.mkdtemp()
|
||||
self.old_site = site.USER_SITE
|
||||
site.USER_SITE = tempfile.mkdtemp()
|
||||
easy_install_pkg.__file__ = site.USER_SITE
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
shutil.rmtree(self.dir)
|
||||
|
||||
shutil.rmtree(site.USER_BASE)
|
||||
shutil.rmtree(site.USER_SITE)
|
||||
site.USER_BASE = self.old_base
|
||||
site.USER_SITE = self.old_site
|
||||
site.ENABLE_USER_SITE = self.old_enable_site
|
||||
easy_install_pkg.__file__ = self.old_file
|
||||
|
||||
def test_user_install_implied(self):
|
||||
site.ENABLE_USER_SITE = True # disabled sometimes
|
||||
#XXX: replace with something meaningfull
|
||||
dist = Distribution()
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = easy_install(dist)
|
||||
cmd.args = ['py']
|
||||
cmd.ensure_finalized()
|
||||
self.assertTrue(cmd.user, 'user should be implied')
|
||||
|
||||
def test_multiproc_atexit(self):
|
||||
try:
|
||||
__import__('multiprocessing')
|
||||
except ImportError:
|
||||
# skip the test if multiprocessing is not available
|
||||
return
|
||||
|
||||
log = logging.getLogger('test_easy_install')
|
||||
logging.basicConfig(level=logging.INFO, stream=sys.stderr)
|
||||
log.info('this should not break')
|
||||
|
||||
def test_user_install_not_implied_without_usersite_enabled(self):
|
||||
site.ENABLE_USER_SITE = False # usually enabled
|
||||
#XXX: replace with something meaningfull
|
||||
dist = Distribution()
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = easy_install(dist)
|
||||
cmd.args = ['py']
|
||||
cmd.initialize_options()
|
||||
self.assertFalse(cmd.user, 'NOT user should be implied')
|
||||
|
||||
def test_local_index(self):
|
||||
# make sure the local index is used
|
||||
# when easy_install looks for installed
|
||||
# packages
|
||||
new_location = tempfile.mkdtemp()
|
||||
target = tempfile.mkdtemp()
|
||||
egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
|
||||
f = open(egg_file, 'w')
|
||||
try:
|
||||
f.write('Name: foo\n')
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
sys.path.append(target)
|
||||
old_ppath = os.environ.get('PYTHONPATH')
|
||||
os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path)
|
||||
try:
|
||||
dist = Distribution()
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = easy_install(dist)
|
||||
cmd.install_dir = target
|
||||
cmd.args = ['foo']
|
||||
cmd.ensure_finalized()
|
||||
cmd.local_index.scan([new_location])
|
||||
res = cmd.easy_install('foo')
|
||||
actual = os.path.normcase(os.path.realpath(res.location))
|
||||
expected = os.path.normcase(os.path.realpath(new_location))
|
||||
self.assertEqual(actual, expected)
|
||||
finally:
|
||||
sys.path.remove(target)
|
||||
for basedir in [new_location, target, ]:
|
||||
if not os.path.exists(basedir) or not os.path.isdir(basedir):
|
||||
continue
|
||||
try:
|
||||
shutil.rmtree(basedir)
|
||||
except:
|
||||
pass
|
||||
if old_ppath is not None:
|
||||
os.environ['PYTHONPATH'] = old_ppath
|
||||
else:
|
||||
del os.environ['PYTHONPATH']
|
||||
|
||||
def test_setup_requires(self):
|
||||
"""Regression test for Distribute issue #318
|
||||
|
||||
Ensure that a package with setup_requires can be installed when
|
||||
setuptools is installed in the user site-packages without causing a
|
||||
SandboxViolation.
|
||||
"""
|
||||
|
||||
test_pkg = create_setup_requires_package(self.dir)
|
||||
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
||||
|
||||
try:
|
||||
with quiet_context():
|
||||
with reset_setup_stop_context():
|
||||
run_setup(test_setup_py, ['install'])
|
||||
except SandboxViolation:
|
||||
self.fail('Installation caused SandboxViolation')
|
||||
except IndexError:
|
||||
# Test fails in some cases due to bugs in Python
|
||||
# See https://bitbucket.org/pypa/setuptools/issue/201
|
||||
pass
|
||||
|
||||
|
||||
class TestSetupRequires(unittest.TestCase):
|
||||
|
||||
def test_setup_requires_honors_fetch_params(self):
|
||||
"""
|
||||
When easy_install installs a source distribution which specifies
|
||||
setup_requires, it should honor the fetch parameters (such as
|
||||
allow-hosts, index-url, and find-links).
|
||||
"""
|
||||
# set up a server which will simulate an alternate package index.
|
||||
p_index = setuptools.tests.server.MockServer()
|
||||
p_index.start()
|
||||
netloc = 1
|
||||
p_index_loc = urlparse(p_index.url)[netloc]
|
||||
if p_index_loc.endswith(':0'):
|
||||
# Some platforms (Jython) don't find a port to which to bind,
|
||||
# so skip this test for them.
|
||||
return
|
||||
with quiet_context():
|
||||
# create an sdist that has a build-time dependency.
|
||||
with TestSetupRequires.create_sdist() as dist_file:
|
||||
with tempdir_context() as temp_install_dir:
|
||||
with environment_context(PYTHONPATH=temp_install_dir):
|
||||
ei_params = ['--index-url', p_index.url,
|
||||
'--allow-hosts', p_index_loc,
|
||||
'--exclude-scripts', '--install-dir', temp_install_dir,
|
||||
dist_file]
|
||||
with reset_setup_stop_context():
|
||||
with argv_context(['easy_install']):
|
||||
# attempt to install the dist. It should fail because
|
||||
# it doesn't exist.
|
||||
self.assertRaises(SystemExit,
|
||||
easy_install_pkg.main, ei_params)
|
||||
# there should have been two or three requests to the server
|
||||
# (three happens on Python 3.3a)
|
||||
self.assertTrue(2 <= len(p_index.requests) <= 3)
|
||||
self.assertEqual(p_index.requests[0].path, '/does-not-exist/')
|
||||
|
||||
@staticmethod
|
||||
@contextlib.contextmanager
|
||||
def create_sdist():
|
||||
"""
|
||||
Return an sdist with a setup_requires dependency (of something that
|
||||
doesn't exist)
|
||||
"""
|
||||
with tempdir_context() as dir:
|
||||
dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
|
||||
make_trivial_sdist(
|
||||
dist_path,
|
||||
textwrap.dedent("""
|
||||
import setuptools
|
||||
setuptools.setup(
|
||||
name="setuptools-test-fetcher",
|
||||
version="1.0",
|
||||
setup_requires = ['does-not-exist'],
|
||||
)
|
||||
""").lstrip())
|
||||
yield dist_path
|
||||
|
||||
def test_setup_requires_overrides_version_conflict(self):
|
||||
"""
|
||||
Regression test for issue #323.
|
||||
|
||||
Ensures that a distribution's setup_requires requirements can still be
|
||||
installed and used locally even if a conflicting version of that
|
||||
requirement is already on the path.
|
||||
"""
|
||||
|
||||
pr_state = pkg_resources.__getstate__()
|
||||
fake_dist = PRDistribution('does-not-matter', project_name='foobar',
|
||||
version='0.0')
|
||||
working_set.add(fake_dist)
|
||||
|
||||
try:
|
||||
with tempdir_context() as temp_dir:
|
||||
test_pkg = create_setup_requires_package(temp_dir)
|
||||
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
||||
with quiet_context() as (stdout, stderr):
|
||||
with reset_setup_stop_context():
|
||||
try:
|
||||
# Don't even need to install the package, just
|
||||
# running the setup.py at all is sufficient
|
||||
run_setup(test_setup_py, ['--name'])
|
||||
except VersionConflict:
|
||||
self.fail('Installing setup.py requirements '
|
||||
'caused a VersionConflict')
|
||||
|
||||
lines = stdout.readlines()
|
||||
self.assertTrue(len(lines) > 0)
|
||||
self.assertTrue(lines[-1].strip(), 'test_pkg')
|
||||
finally:
|
||||
pkg_resources.__setstate__(pr_state)
|
||||
|
||||
|
||||
def create_setup_requires_package(path):
|
||||
"""Creates a source tree under path for a trivial test package that has a
|
||||
single requirement in setup_requires--a tarball for that requirement is
|
||||
also created and added to the dependency_links argument.
|
||||
"""
|
||||
|
||||
test_setup_attrs = {
|
||||
'name': 'test_pkg', 'version': '0.0',
|
||||
'setup_requires': ['foobar==0.1'],
|
||||
'dependency_links': [os.path.abspath(path)]
|
||||
}
|
||||
|
||||
test_pkg = os.path.join(path, 'test_pkg')
|
||||
test_setup_py = os.path.join(test_pkg, 'setup.py')
|
||||
os.mkdir(test_pkg)
|
||||
|
||||
f = open(test_setup_py, 'w')
|
||||
f.write(textwrap.dedent("""\
|
||||
import setuptools
|
||||
setuptools.setup(**%r)
|
||||
""" % test_setup_attrs))
|
||||
f.close()
|
||||
|
||||
foobar_path = os.path.join(path, 'foobar-0.1.tar.gz')
|
||||
make_trivial_sdist(
|
||||
foobar_path,
|
||||
textwrap.dedent("""\
|
||||
import setuptools
|
||||
setuptools.setup(
|
||||
name='foobar',
|
||||
version='0.1'
|
||||
)
|
||||
"""))
|
||||
|
||||
return test_pkg
|
||||
|
||||
|
||||
def make_trivial_sdist(dist_path, setup_py):
|
||||
"""Create a simple sdist tarball at dist_path, containing just a
|
||||
setup.py, the contents of which are provided by the setup_py string.
|
||||
"""
|
||||
|
||||
setup_py_file = tarfile.TarInfo(name='setup.py')
|
||||
try:
|
||||
# Python 3 (StringIO gets converted to io module)
|
||||
MemFile = BytesIO
|
||||
except AttributeError:
|
||||
MemFile = StringIO
|
||||
setup_py_bytes = MemFile(setup_py.encode('utf-8'))
|
||||
setup_py_file.size = len(setup_py_bytes.getvalue())
|
||||
dist = tarfile.open(dist_path, 'w:gz')
|
||||
try:
|
||||
dist.addfile(setup_py_file, fileobj=setup_py_bytes)
|
||||
finally:
|
||||
dist.close()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tempdir_context(cd=lambda dir:None):
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
orig_dir = os.getcwd()
|
||||
try:
|
||||
cd(temp_dir)
|
||||
yield temp_dir
|
||||
finally:
|
||||
cd(orig_dir)
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def environment_context(**updates):
|
||||
old_env = os.environ.copy()
|
||||
os.environ.update(updates)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for key in updates:
|
||||
del os.environ[key]
|
||||
os.environ.update(old_env)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def argv_context(repl):
|
||||
old_argv = sys.argv[:]
|
||||
sys.argv[:] = repl
|
||||
yield
|
||||
sys.argv[:] = old_argv
|
||||
|
||||
@contextlib.contextmanager
|
||||
def reset_setup_stop_context():
|
||||
"""
|
||||
When the setuptools tests are run using setup.py test, and then
|
||||
one wants to invoke another setup() command (such as easy_install)
|
||||
within those tests, it's necessary to reset the global variable
|
||||
in distutils.core so that the setup() command will run naturally.
|
||||
"""
|
||||
setup_stop_after = distutils.core._setup_stop_after
|
||||
distutils.core._setup_stop_after = None
|
||||
yield
|
||||
distutils.core._setup_stop_after = setup_stop_after
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def quiet_context():
|
||||
"""
|
||||
Redirect stdout/stderr to StringIO objects to prevent console output from
|
||||
distutils commands.
|
||||
"""
|
||||
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
new_stdout = sys.stdout = StringIO()
|
||||
new_stderr = sys.stderr = StringIO()
|
||||
try:
|
||||
yield new_stdout, new_stderr
|
||||
finally:
|
||||
new_stdout.seek(0)
|
||||
new_stderr.seek(0)
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
|
|
@ -1,210 +0,0 @@
|
|||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import pkg_resources
|
||||
import warnings
|
||||
from setuptools.command import egg_info
|
||||
from setuptools import svn_utils
|
||||
from setuptools.tests import environment, test_svn
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10')
|
||||
"An entries file generated with svn 1.6.17 against the legacy Setuptools repo"
|
||||
|
||||
|
||||
class TestEggInfo(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.test_dir = tempfile.mkdtemp()
|
||||
os.mkdir(os.path.join(self.test_dir, '.svn'))
|
||||
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.test_dir)
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
shutil.rmtree(self.test_dir)
|
||||
|
||||
def _write_entries(self, entries):
|
||||
fn = os.path.join(self.test_dir, '.svn', 'entries')
|
||||
entries_f = open(fn, 'wb')
|
||||
entries_f.write(entries)
|
||||
entries_f.close()
|
||||
|
||||
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
|
||||
def test_version_10_format(self):
|
||||
"""
|
||||
"""
|
||||
#keeping this set for 1.6 is a good check on the get_svn_revision
|
||||
#to ensure I return using svnversion what would had been returned
|
||||
version_str = svn_utils.SvnInfo.get_svn_version()
|
||||
version = [int(x) for x in version_str.split('.')[:2]]
|
||||
if version != [1, 6]:
|
||||
if hasattr(self, 'skipTest'):
|
||||
self.skipTest('')
|
||||
else:
|
||||
sys.stderr.write('\n Skipping due to SVN Version\n')
|
||||
return
|
||||
|
||||
self._write_entries(ENTRIES_V10)
|
||||
rev = egg_info.egg_info.get_svn_revision()
|
||||
self.assertEqual(rev, '89000')
|
||||
|
||||
def test_version_10_format_legacy_parser(self):
|
||||
"""
|
||||
"""
|
||||
path_variable = None
|
||||
for env in os.environ:
|
||||
if env.lower() == 'path':
|
||||
path_variable = env
|
||||
|
||||
if path_variable:
|
||||
old_path = os.environ[path_variable]
|
||||
os.environ[path_variable] = ''
|
||||
#catch_warnings not available until py26
|
||||
warning_filters = warnings.filters
|
||||
warnings.filters = warning_filters[:]
|
||||
try:
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
self._write_entries(ENTRIES_V10)
|
||||
rev = egg_info.egg_info.get_svn_revision()
|
||||
finally:
|
||||
#restore the warning filters
|
||||
warnings.filters = warning_filters
|
||||
#restore the os path
|
||||
if path_variable:
|
||||
os.environ[path_variable] = old_path
|
||||
|
||||
self.assertEqual(rev, '89000')
|
||||
|
||||
DUMMY_SOURCE_TXT = """CHANGES.txt
|
||||
CONTRIBUTORS.txt
|
||||
HISTORY.txt
|
||||
LICENSE
|
||||
MANIFEST.in
|
||||
README.txt
|
||||
setup.py
|
||||
dummy/__init__.py
|
||||
dummy/test.txt
|
||||
dummy.egg-info/PKG-INFO
|
||||
dummy.egg-info/SOURCES.txt
|
||||
dummy.egg-info/dependency_links.txt
|
||||
dummy.egg-info/top_level.txt"""
|
||||
|
||||
|
||||
class TestSvnDummy(environment.ZippedEnvironment):
|
||||
|
||||
def setUp(self):
|
||||
version = svn_utils.SvnInfo.get_svn_version()
|
||||
if not version: # None or Empty
|
||||
return None
|
||||
|
||||
self.base_version = tuple([int(x) for x in version.split('.')][:2])
|
||||
|
||||
if not self.base_version:
|
||||
raise ValueError('No SVN tools installed')
|
||||
elif self.base_version < (1, 3):
|
||||
raise ValueError('Insufficient SVN Version %s' % version)
|
||||
elif self.base_version >= (1, 9):
|
||||
#trying the latest version
|
||||
self.base_version = (1, 8)
|
||||
|
||||
self.dataname = "dummy%i%i" % self.base_version
|
||||
self.datafile = os.path.join('setuptools', 'tests',
|
||||
'svn_data', self.dataname + ".zip")
|
||||
super(TestSvnDummy, self).setUp()
|
||||
|
||||
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
|
||||
def test_sources(self):
|
||||
code, data = environment.run_setup_py(["sdist"],
|
||||
pypath=self.old_cwd,
|
||||
data_stream=1)
|
||||
if code:
|
||||
raise AssertionError(data)
|
||||
|
||||
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
|
||||
infile = open(sources, 'r')
|
||||
try:
|
||||
read_contents = infile.read()
|
||||
finally:
|
||||
infile.close()
|
||||
del infile
|
||||
|
||||
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
|
||||
|
||||
return data
|
||||
|
||||
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
|
||||
def test_svn_tags(self):
|
||||
code, data = environment.run_setup_py(["egg_info",
|
||||
"--tag-svn-revision"],
|
||||
pypath=self.old_cwd,
|
||||
data_stream=1)
|
||||
if code:
|
||||
raise AssertionError(data)
|
||||
|
||||
pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
|
||||
infile = open(pkginfo, 'r')
|
||||
try:
|
||||
read_contents = infile.readlines()
|
||||
finally:
|
||||
infile.close()
|
||||
del infile
|
||||
|
||||
self.assertTrue("Version: 0.1.1-r1\n" in read_contents)
|
||||
|
||||
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
|
||||
def test_no_tags(self):
|
||||
code, data = environment.run_setup_py(["egg_info"],
|
||||
pypath=self.old_cwd,
|
||||
data_stream=1)
|
||||
if code:
|
||||
raise AssertionError(data)
|
||||
|
||||
pkginfo = os.path.join('dummy.egg-info', 'PKG-INFO')
|
||||
infile = open(pkginfo, 'r')
|
||||
try:
|
||||
read_contents = infile.readlines()
|
||||
finally:
|
||||
infile.close()
|
||||
del infile
|
||||
|
||||
self.assertTrue("Version: 0.1.1\n" in read_contents)
|
||||
|
||||
|
||||
class TestSvnDummyLegacy(environment.ZippedEnvironment):
|
||||
|
||||
def setUp(self):
|
||||
self.base_version = (1, 6)
|
||||
self.dataname = "dummy%i%i" % self.base_version
|
||||
self.datafile = os.path.join('setuptools', 'tests',
|
||||
'svn_data', self.dataname + ".zip")
|
||||
super(TestSvnDummyLegacy, self).setUp()
|
||||
|
||||
def test_sources(self):
|
||||
code, data = environment.run_setup_py(["sdist"],
|
||||
pypath=self.old_cwd,
|
||||
path="",
|
||||
data_stream=1)
|
||||
if code:
|
||||
raise AssertionError(data)
|
||||
|
||||
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
|
||||
infile = open(sources, 'r')
|
||||
try:
|
||||
read_contents = infile.read()
|
||||
finally:
|
||||
infile.close()
|
||||
del infile
|
||||
|
||||
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
||||
|
|
@ -1,170 +0,0 @@
|
|||
"""Tests for setuptools.find_packages()."""
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import platform
|
||||
|
||||
import setuptools
|
||||
from setuptools import find_packages
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
find_420_packages = setuptools.PEP420PackageFinder.find
|
||||
|
||||
# modeled after CPython's test.support.can_symlink
|
||||
def can_symlink():
|
||||
TESTFN = tempfile.mktemp()
|
||||
symlink_path = TESTFN + "can_symlink"
|
||||
try:
|
||||
os.symlink(TESTFN, symlink_path)
|
||||
can = True
|
||||
except (OSError, NotImplementedError, AttributeError):
|
||||
can = False
|
||||
else:
|
||||
os.remove(symlink_path)
|
||||
globals().update(can_symlink=lambda: can)
|
||||
return can
|
||||
|
||||
def has_symlink():
|
||||
bad_symlink = (
|
||||
# Windows symlink directory detection is broken on Python 3.2
|
||||
platform.system() == 'Windows' and sys.version_info[:2] == (3,2)
|
||||
)
|
||||
return can_symlink() and not bad_symlink
|
||||
|
||||
class TestFindPackages(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dist_dir = tempfile.mkdtemp()
|
||||
self._make_pkg_structure()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.dist_dir)
|
||||
|
||||
def _make_pkg_structure(self):
|
||||
"""Make basic package structure.
|
||||
|
||||
dist/
|
||||
docs/
|
||||
conf.py
|
||||
pkg/
|
||||
__pycache__/
|
||||
nspkg/
|
||||
mod.py
|
||||
subpkg/
|
||||
assets/
|
||||
asset
|
||||
__init__.py
|
||||
setup.py
|
||||
|
||||
"""
|
||||
self.docs_dir = self._mkdir('docs', self.dist_dir)
|
||||
self._touch('conf.py', self.docs_dir)
|
||||
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
|
||||
self._mkdir('__pycache__', self.pkg_dir)
|
||||
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
|
||||
self._touch('mod.py', self.ns_pkg_dir)
|
||||
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
|
||||
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
|
||||
self._touch('asset', self.asset_dir)
|
||||
self._touch('__init__.py', self.sub_pkg_dir)
|
||||
self._touch('setup.py', self.dist_dir)
|
||||
|
||||
def _mkdir(self, path, parent_dir=None):
|
||||
if parent_dir:
|
||||
path = os.path.join(parent_dir, path)
|
||||
os.mkdir(path)
|
||||
return path
|
||||
|
||||
def _touch(self, path, dir_=None):
|
||||
if dir_:
|
||||
path = os.path.join(dir_, path)
|
||||
fp = open(path, 'w')
|
||||
fp.close()
|
||||
return path
|
||||
|
||||
def test_regular_package(self):
|
||||
self._touch('__init__.py', self.pkg_dir)
|
||||
packages = find_packages(self.dist_dir)
|
||||
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
|
||||
|
||||
def test_exclude(self):
|
||||
self._touch('__init__.py', self.pkg_dir)
|
||||
packages = find_packages(self.dist_dir, exclude=('pkg.*',))
|
||||
assert packages == ['pkg']
|
||||
|
||||
def test_include_excludes_other(self):
|
||||
"""
|
||||
If include is specified, other packages should be excluded.
|
||||
"""
|
||||
self._touch('__init__.py', self.pkg_dir)
|
||||
alt_dir = self._mkdir('other_pkg', self.dist_dir)
|
||||
self._touch('__init__.py', alt_dir)
|
||||
packages = find_packages(self.dist_dir, include=['other_pkg'])
|
||||
self.assertEqual(packages, ['other_pkg'])
|
||||
|
||||
def test_dir_with_dot_is_skipped(self):
|
||||
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
|
||||
data_dir = self._mkdir('some.data', self.pkg_dir)
|
||||
self._touch('__init__.py', data_dir)
|
||||
self._touch('file.dat', data_dir)
|
||||
packages = find_packages(self.dist_dir)
|
||||
self.assertTrue('pkg.some.data' not in packages)
|
||||
|
||||
def test_dir_with_packages_in_subdir_is_excluded(self):
|
||||
"""
|
||||
Ensure that a package in a non-package such as build/pkg/__init__.py
|
||||
is excluded.
|
||||
"""
|
||||
build_dir = self._mkdir('build', self.dist_dir)
|
||||
build_pkg_dir = self._mkdir('pkg', build_dir)
|
||||
self._touch('__init__.py', build_pkg_dir)
|
||||
packages = find_packages(self.dist_dir)
|
||||
self.assertTrue('build.pkg' not in packages)
|
||||
|
||||
@skipIf(not has_symlink(), 'Symlink support required')
|
||||
def test_symlinked_packages_are_included(self):
|
||||
"""
|
||||
A symbolically-linked directory should be treated like any other
|
||||
directory when matched as a package.
|
||||
|
||||
Create a link from lpkg -> pkg.
|
||||
"""
|
||||
self._touch('__init__.py', self.pkg_dir)
|
||||
linked_pkg = os.path.join(self.dist_dir, 'lpkg')
|
||||
os.symlink('pkg', linked_pkg)
|
||||
assert os.path.isdir(linked_pkg)
|
||||
packages = find_packages(self.dist_dir)
|
||||
self.assertTrue('lpkg' in packages)
|
||||
|
||||
def _assert_packages(self, actual, expected):
|
||||
self.assertEqual(set(actual), set(expected))
|
||||
|
||||
def test_pep420_ns_package(self):
|
||||
packages = find_420_packages(
|
||||
self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
|
||||
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
|
||||
|
||||
def test_pep420_ns_package_no_includes(self):
|
||||
packages = find_420_packages(
|
||||
self.dist_dir, exclude=['pkg.subpkg.assets'])
|
||||
self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
|
||||
|
||||
def test_pep420_ns_package_no_includes_or_excludes(self):
|
||||
packages = find_420_packages(self.dist_dir)
|
||||
expected = [
|
||||
'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
|
||||
self._assert_packages(packages, expected)
|
||||
|
||||
def test_regular_package_with_nested_pep420_ns_packages(self):
|
||||
self._touch('__init__.py', self.pkg_dir)
|
||||
packages = find_420_packages(
|
||||
self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
|
||||
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
|
||||
|
||||
def test_pep420_ns_package_no_non_package_dirs(self):
|
||||
shutil.rmtree(self.docs_dir)
|
||||
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
|
||||
packages = find_420_packages(self.dist_dir)
|
||||
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
"""Run some integration tests.
|
||||
|
||||
Try to install a few packages.
|
||||
"""
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from setuptools.command.easy_install import easy_install
|
||||
from setuptools.command import easy_install as easy_install_pkg
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def install_context(request, tmpdir, monkeypatch):
|
||||
"""Fixture to set up temporary installation directory.
|
||||
"""
|
||||
# Save old values so we can restore them.
|
||||
new_cwd = tmpdir.mkdir('cwd')
|
||||
user_base = tmpdir.mkdir('user_base')
|
||||
user_site = tmpdir.mkdir('user_site')
|
||||
install_dir = tmpdir.mkdir('install_dir')
|
||||
|
||||
def fin():
|
||||
# undo the monkeypatch, particularly needed under
|
||||
# windows because of kept handle on cwd
|
||||
monkeypatch.undo()
|
||||
new_cwd.remove()
|
||||
user_base.remove()
|
||||
user_site.remove()
|
||||
install_dir.remove()
|
||||
request.addfinalizer(fin)
|
||||
|
||||
# Change the environment and site settings to control where the
|
||||
# files are installed and ensure we do not overwrite anything.
|
||||
monkeypatch.chdir(new_cwd)
|
||||
monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
|
||||
monkeypatch.setattr('site.USER_BASE', user_base.strpath)
|
||||
monkeypatch.setattr('site.USER_SITE', user_site.strpath)
|
||||
monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
|
||||
monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
|
||||
|
||||
# Set up the command for performing the installation.
|
||||
dist = Distribution()
|
||||
cmd = easy_install(dist)
|
||||
cmd.install_dir = install_dir.strpath
|
||||
return cmd
|
||||
|
||||
|
||||
def _install_one(requirement, cmd, pkgname, modulename):
|
||||
cmd.args = [requirement]
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
target = cmd.install_dir
|
||||
dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
|
||||
assert dest_path
|
||||
assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
|
||||
|
||||
|
||||
def test_stevedore(install_context):
|
||||
_install_one('stevedore', install_context,
|
||||
'stevedore', 'extension.py')
|
||||
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_virtualenvwrapper(install_context):
|
||||
_install_one('virtualenvwrapper', install_context,
|
||||
'virtualenvwrapper', 'hook_loader.py')
|
||||
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_pbr(install_context):
|
||||
_install_one('pbr', install_context,
|
||||
'pbr', 'core.py')
|
||||
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_python_novaclient(install_context):
|
||||
_install_one('python-novaclient', install_context,
|
||||
'novaclient', 'base.py')
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
import os
|
||||
import unittest
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
try:
|
||||
import ast
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
class TestMarkerlib(unittest.TestCase):
|
||||
|
||||
@skipIf('ast' not in globals(),
|
||||
"ast not available (Python < 2.6?)")
|
||||
def test_markers(self):
|
||||
from _markerlib import interpret, default_environment, compile
|
||||
|
||||
os_name = os.name
|
||||
|
||||
self.assertTrue(interpret(""))
|
||||
|
||||
self.assertTrue(interpret("os.name != 'buuuu'"))
|
||||
self.assertTrue(interpret("os_name != 'buuuu'"))
|
||||
self.assertTrue(interpret("python_version > '1.0'"))
|
||||
self.assertTrue(interpret("python_version < '5.0'"))
|
||||
self.assertTrue(interpret("python_version <= '5.0'"))
|
||||
self.assertTrue(interpret("python_version >= '1.0'"))
|
||||
self.assertTrue(interpret("'%s' in os.name" % os_name))
|
||||
self.assertTrue(interpret("'%s' in os_name" % os_name))
|
||||
self.assertTrue(interpret("'buuuu' not in os.name"))
|
||||
|
||||
self.assertFalse(interpret("os.name == 'buuuu'"))
|
||||
self.assertFalse(interpret("os_name == 'buuuu'"))
|
||||
self.assertFalse(interpret("python_version < '1.0'"))
|
||||
self.assertFalse(interpret("python_version > '5.0'"))
|
||||
self.assertFalse(interpret("python_version >= '5.0'"))
|
||||
self.assertFalse(interpret("python_version <= '1.0'"))
|
||||
self.assertFalse(interpret("'%s' not in os.name" % os_name))
|
||||
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))
|
||||
self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'"))
|
||||
|
||||
environment = default_environment()
|
||||
environment['extra'] = 'test'
|
||||
self.assertTrue(interpret("extra == 'test'", environment))
|
||||
self.assertFalse(interpret("extra == 'doc'", environment))
|
||||
|
||||
def raises_nameError():
|
||||
try:
|
||||
interpret("python.version == '42'")
|
||||
except NameError:
|
||||
pass
|
||||
else:
|
||||
raise Exception("Expected NameError")
|
||||
|
||||
raises_nameError()
|
||||
|
||||
def raises_syntaxError():
|
||||
try:
|
||||
interpret("(x for x in (4,))")
|
||||
except SyntaxError:
|
||||
pass
|
||||
else:
|
||||
raise Exception("Expected SyntaxError")
|
||||
|
||||
raises_syntaxError()
|
||||
|
||||
statement = "python_version == '5'"
|
||||
self.assertEqual(compile(statement).__doc__, statement)
|
||||
|
||||
|
|
@ -1,157 +0,0 @@
|
|||
"""msvc9compiler monkey patch test
|
||||
|
||||
This test ensures that importing setuptools is sufficient to replace
|
||||
the standard find_vcvarsall function with our patched version that
|
||||
finds the Visual C++ for Python package.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
import distutils.errors
|
||||
import contextlib
|
||||
|
||||
# importing only setuptools should apply the patch
|
||||
__import__('setuptools')
|
||||
|
||||
class MockReg:
|
||||
"""Mock for distutils.msvc9compiler.Reg. We patch it
|
||||
with an instance of this class that mocks out the
|
||||
functions that access the registry.
|
||||
"""
|
||||
|
||||
def __init__(self, hkey_local_machine={}, hkey_current_user={}):
|
||||
self.hklm = hkey_local_machine
|
||||
self.hkcu = hkey_current_user
|
||||
|
||||
def __enter__(self):
|
||||
self.original_read_keys = distutils.msvc9compiler.Reg.read_keys
|
||||
self.original_read_values = distutils.msvc9compiler.Reg.read_values
|
||||
|
||||
_winreg = getattr(distutils.msvc9compiler, '_winreg', None)
|
||||
winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
|
||||
|
||||
hives = {
|
||||
winreg.HKEY_CURRENT_USER: self.hkcu,
|
||||
winreg.HKEY_LOCAL_MACHINE: self.hklm,
|
||||
}
|
||||
|
||||
def read_keys(cls, base, key):
|
||||
"""Return list of registry keys."""
|
||||
hive = hives.get(base, {})
|
||||
return [k.rpartition('\\')[2]
|
||||
for k in hive if k.startswith(key.lower())]
|
||||
|
||||
def read_values(cls, base, key):
|
||||
"""Return dict of registry keys and values."""
|
||||
hive = hives.get(base, {})
|
||||
return dict((k.rpartition('\\')[2], hive[k])
|
||||
for k in hive if k.startswith(key.lower()))
|
||||
|
||||
distutils.msvc9compiler.Reg.read_keys = classmethod(read_keys)
|
||||
distutils.msvc9compiler.Reg.read_values = classmethod(read_values)
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_tb):
|
||||
distutils.msvc9compiler.Reg.read_keys = self.original_read_keys
|
||||
distutils.msvc9compiler.Reg.read_values = self.original_read_values
|
||||
|
||||
@contextlib.contextmanager
|
||||
def patch_env(**replacements):
|
||||
"""
|
||||
In a context, patch the environment with replacements. Pass None values
|
||||
to clear the values.
|
||||
"""
|
||||
saved = dict(
|
||||
(key, os.environ['key'])
|
||||
for key in replacements
|
||||
if key in os.environ
|
||||
)
|
||||
|
||||
# remove values that are null
|
||||
remove = (key for (key, value) in replacements.items() if value is None)
|
||||
for key in list(remove):
|
||||
os.environ.pop(key, None)
|
||||
replacements.pop(key)
|
||||
|
||||
os.environ.update(replacements)
|
||||
|
||||
try:
|
||||
yield saved
|
||||
finally:
|
||||
for key in replacements:
|
||||
os.environ.pop(key, None)
|
||||
os.environ.update(saved)
|
||||
|
||||
class TestMSVC9Compiler(unittest.TestCase):
|
||||
|
||||
def test_find_vcvarsall_patch(self):
|
||||
if not hasattr(distutils, 'msvc9compiler'):
|
||||
# skip
|
||||
return
|
||||
|
||||
self.assertEqual(
|
||||
"setuptools.msvc9_support",
|
||||
distutils.msvc9compiler.find_vcvarsall.__module__,
|
||||
"find_vcvarsall was not patched"
|
||||
)
|
||||
|
||||
find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
|
||||
query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
|
||||
|
||||
# No registry entries or environment variable means we should
|
||||
# not find anything
|
||||
with patch_env(VS90COMNTOOLS=None):
|
||||
with MockReg():
|
||||
self.assertIsNone(find_vcvarsall(9.0))
|
||||
|
||||
try:
|
||||
query_vcvarsall(9.0)
|
||||
self.fail('Expected DistutilsPlatformError from query_vcvarsall()')
|
||||
except distutils.errors.DistutilsPlatformError:
|
||||
exc_message = str(sys.exc_info()[1])
|
||||
self.assertIn('aka.ms/vcpython27', exc_message)
|
||||
|
||||
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
|
||||
key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
|
||||
|
||||
# Make two mock files so we can tell whether HCKU entries are
|
||||
# preferred to HKLM entries.
|
||||
mock_installdir_1 = tempfile.mkdtemp()
|
||||
mock_vcvarsall_bat_1 = os.path.join(mock_installdir_1, 'vcvarsall.bat')
|
||||
open(mock_vcvarsall_bat_1, 'w').close()
|
||||
mock_installdir_2 = tempfile.mkdtemp()
|
||||
mock_vcvarsall_bat_2 = os.path.join(mock_installdir_2, 'vcvarsall.bat')
|
||||
open(mock_vcvarsall_bat_2, 'w').close()
|
||||
try:
|
||||
# Ensure we get the current user's setting first
|
||||
with MockReg(
|
||||
hkey_current_user={key_32: mock_installdir_1},
|
||||
hkey_local_machine={
|
||||
key_32: mock_installdir_2,
|
||||
key_64: mock_installdir_2,
|
||||
}
|
||||
):
|
||||
self.assertEqual(mock_vcvarsall_bat_1, find_vcvarsall(9.0))
|
||||
|
||||
# Ensure we get the local machine value if it's there
|
||||
with MockReg(hkey_local_machine={key_32: mock_installdir_2}):
|
||||
self.assertEqual(mock_vcvarsall_bat_2, find_vcvarsall(9.0))
|
||||
|
||||
# Ensure we prefer the 64-bit local machine key
|
||||
# (*not* the Wow6432Node key)
|
||||
with MockReg(
|
||||
hkey_local_machine={
|
||||
# This *should* only exist on 32-bit machines
|
||||
key_32: mock_installdir_1,
|
||||
# This *should* only exist on 64-bit machines
|
||||
key_64: mock_installdir_2,
|
||||
}
|
||||
):
|
||||
self.assertEqual(mock_vcvarsall_bat_1, find_vcvarsall(9.0))
|
||||
finally:
|
||||
shutil.rmtree(mock_installdir_1)
|
||||
shutil.rmtree(mock_installdir_2)
|
||||
|
|
@ -1,203 +0,0 @@
|
|||
"""Package Index Tests
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
import pkg_resources
|
||||
from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url
|
||||
import distutils.errors
|
||||
import setuptools.package_index
|
||||
from setuptools.tests.server import IndexServer
|
||||
|
||||
class TestPackageIndex(unittest.TestCase):
|
||||
|
||||
def test_bad_url_bad_port(self):
|
||||
index = setuptools.package_index.PackageIndex()
|
||||
url = 'http://127.0.0.1:0/nonesuch/test_package_index'
|
||||
try:
|
||||
v = index.open_url(url)
|
||||
except Exception:
|
||||
v = sys.exc_info()[1]
|
||||
self.assertTrue(url in str(v))
|
||||
else:
|
||||
self.assertTrue(isinstance(v, HTTPError))
|
||||
|
||||
def test_bad_url_typo(self):
|
||||
# issue 16
|
||||
# easy_install inquant.contentmirror.plone breaks because of a typo
|
||||
# in its home URL
|
||||
index = setuptools.package_index.PackageIndex(
|
||||
hosts=('www.example.com',)
|
||||
)
|
||||
|
||||
url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
|
||||
try:
|
||||
v = index.open_url(url)
|
||||
except Exception:
|
||||
v = sys.exc_info()[1]
|
||||
self.assertTrue(url in str(v))
|
||||
else:
|
||||
self.assertTrue(isinstance(v, HTTPError))
|
||||
|
||||
def test_bad_url_bad_status_line(self):
|
||||
index = setuptools.package_index.PackageIndex(
|
||||
hosts=('www.example.com',)
|
||||
)
|
||||
|
||||
def _urlopen(*args):
|
||||
raise httplib.BadStatusLine('line')
|
||||
|
||||
index.opener = _urlopen
|
||||
url = 'http://example.com'
|
||||
try:
|
||||
v = index.open_url(url)
|
||||
except Exception:
|
||||
v = sys.exc_info()[1]
|
||||
self.assertTrue('line' in str(v))
|
||||
else:
|
||||
raise AssertionError('Should have raise here!')
|
||||
|
||||
def test_bad_url_double_scheme(self):
|
||||
"""
|
||||
A bad URL with a double scheme should raise a DistutilsError.
|
||||
"""
|
||||
index = setuptools.package_index.PackageIndex(
|
||||
hosts=('www.example.com',)
|
||||
)
|
||||
|
||||
# issue 20
|
||||
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
|
||||
try:
|
||||
index.open_url(url)
|
||||
except distutils.errors.DistutilsError:
|
||||
error = sys.exc_info()[1]
|
||||
msg = unicode(error)
|
||||
assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
|
||||
return
|
||||
raise RuntimeError("Did not raise")
|
||||
|
||||
def test_bad_url_screwy_href(self):
|
||||
index = setuptools.package_index.PackageIndex(
|
||||
hosts=('www.example.com',)
|
||||
)
|
||||
|
||||
# issue #160
|
||||
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
|
||||
# this should not fail
|
||||
url = 'http://example.com'
|
||||
page = ('<a href="http://www.famfamfam.com]('
|
||||
'http://www.famfamfam.com/">')
|
||||
index.process_index(url, page)
|
||||
|
||||
def test_url_ok(self):
|
||||
index = setuptools.package_index.PackageIndex(
|
||||
hosts=('www.example.com',)
|
||||
)
|
||||
url = 'file:///tmp/test_package_index'
|
||||
self.assertTrue(index.url_ok(url, True))
|
||||
|
||||
def test_links_priority(self):
|
||||
"""
|
||||
Download links from the pypi simple index should be used before
|
||||
external download links.
|
||||
https://bitbucket.org/tarek/distribute/issue/163
|
||||
|
||||
Usecase :
|
||||
- someone uploads a package on pypi, a md5 is generated
|
||||
- someone manually copies this link (with the md5 in the url) onto an
|
||||
external page accessible from the package page.
|
||||
- someone reuploads the package (with a different md5)
|
||||
- while easy_installing, an MD5 error occurs because the external link
|
||||
is used
|
||||
-> Setuptools should use the link from pypi, not the external one.
|
||||
"""
|
||||
if sys.platform.startswith('java'):
|
||||
# Skip this test on jython because binding to :0 fails
|
||||
return
|
||||
|
||||
# start an index server
|
||||
server = IndexServer()
|
||||
server.start()
|
||||
index_url = server.base_url() + 'test_links_priority/simple/'
|
||||
|
||||
# scan a test index
|
||||
pi = setuptools.package_index.PackageIndex(index_url)
|
||||
requirement = pkg_resources.Requirement.parse('foobar')
|
||||
pi.find_packages(requirement)
|
||||
server.stop()
|
||||
|
||||
# the distribution has been found
|
||||
self.assertTrue('foobar' in pi)
|
||||
# we have only one link, because links are compared without md5
|
||||
self.assertTrue(len(pi['foobar'])==1)
|
||||
# the link should be from the index
|
||||
self.assertTrue('correct_md5' in pi['foobar'][0].location)
|
||||
|
||||
def test_parse_bdist_wininst(self):
|
||||
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
|
||||
'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
|
||||
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
|
||||
'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
|
||||
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
|
||||
'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
|
||||
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
|
||||
'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
|
||||
|
||||
def test__vcs_split_rev_from_url(self):
|
||||
"""
|
||||
Test the basic usage of _vcs_split_rev_from_url
|
||||
"""
|
||||
vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
|
||||
url, rev = vsrfu('https://example.com/bar@2995')
|
||||
self.assertEqual(url, 'https://example.com/bar')
|
||||
self.assertEqual(rev, '2995')
|
||||
|
||||
def test_local_index(self):
|
||||
"""
|
||||
local_open should be able to read an index from the file system.
|
||||
"""
|
||||
f = open('index.html', 'w')
|
||||
f.write('<div>content</div>')
|
||||
f.close()
|
||||
try:
|
||||
url = 'file:' + pathname2url(os.getcwd()) + '/'
|
||||
res = setuptools.package_index.local_open(url)
|
||||
finally:
|
||||
os.remove('index.html')
|
||||
assert 'content' in res.read()
|
||||
|
||||
|
||||
class TestContentCheckers(unittest.TestCase):
|
||||
|
||||
def test_md5(self):
|
||||
checker = setuptools.package_index.HashChecker.from_url(
|
||||
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
|
||||
checker.feed('You should probably not be using MD5'.encode('ascii'))
|
||||
self.assertEqual(checker.hash.hexdigest(),
|
||||
'f12895fdffbd45007040d2e44df98478')
|
||||
self.assertTrue(checker.is_valid())
|
||||
|
||||
def test_other_fragment(self):
|
||||
"Content checks should succeed silently if no hash is present"
|
||||
checker = setuptools.package_index.HashChecker.from_url(
|
||||
'http://foo/bar#something%20completely%20different')
|
||||
checker.feed('anything'.encode('ascii'))
|
||||
self.assertTrue(checker.is_valid())
|
||||
|
||||
def test_blank_md5(self):
|
||||
"Content checks should succeed if a hash is empty"
|
||||
checker = setuptools.package_index.HashChecker.from_url(
|
||||
'http://foo/bar#md5=')
|
||||
checker.feed('anything'.encode('ascii'))
|
||||
self.assertTrue(checker.is_valid())
|
||||
|
||||
def test_get_hash_name_md5(self):
|
||||
checker = setuptools.package_index.HashChecker.from_url(
|
||||
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
|
||||
self.assertEqual(checker.hash_name, 'md5')
|
||||
|
||||
def test_report(self):
|
||||
checker = setuptools.package_index.HashChecker.from_url(
|
||||
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
|
||||
rep = checker.report(lambda x: x, 'My message about %s')
|
||||
self.assertEqual(rep, 'My message about md5')
|
||||
|
|
@ -1,612 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import shutil
|
||||
from unittest import TestCase
|
||||
|
||||
import pkg_resources
|
||||
from pkg_resources import (parse_requirements, VersionConflict, parse_version,
|
||||
Distribution, EntryPoint, Requirement, safe_version, safe_name,
|
||||
WorkingSet)
|
||||
|
||||
from setuptools.command.easy_install import (get_script_header, is_sh,
|
||||
nt_quote_arg)
|
||||
from setuptools.compat import StringIO, iteritems, PY3
|
||||
from .py26compat import skipIf
|
||||
|
||||
def safe_repr(obj, short=False):
|
||||
""" copied from Python2.7"""
|
||||
try:
|
||||
result = repr(obj)
|
||||
except Exception:
|
||||
result = object.__repr__(obj)
|
||||
if not short or len(result) < pkg_resources._MAX_LENGTH:
|
||||
return result
|
||||
return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...'
|
||||
|
||||
class Metadata(pkg_resources.EmptyProvider):
|
||||
"""Mock object to return metadata as if from an on-disk distribution"""
|
||||
|
||||
def __init__(self,*pairs):
|
||||
self.metadata = dict(pairs)
|
||||
|
||||
def has_metadata(self,name):
|
||||
return name in self.metadata
|
||||
|
||||
def get_metadata(self,name):
|
||||
return self.metadata[name]
|
||||
|
||||
def get_metadata_lines(self,name):
|
||||
return pkg_resources.yield_lines(self.get_metadata(name))
|
||||
|
||||
dist_from_fn = pkg_resources.Distribution.from_filename
|
||||
|
||||
class DistroTests(TestCase):
|
||||
|
||||
def testCollection(self):
|
||||
# empty path should produce no distributions
|
||||
ad = pkg_resources.Environment([], platform=None, python=None)
|
||||
self.assertEqual(list(ad), [])
|
||||
self.assertEqual(ad['FooPkg'],[])
|
||||
ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
|
||||
ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
|
||||
ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
|
||||
|
||||
# Name is in there now
|
||||
self.assertTrue(ad['FooPkg'])
|
||||
# But only 1 package
|
||||
self.assertEqual(list(ad), ['foopkg'])
|
||||
|
||||
# Distributions sort by version
|
||||
self.assertEqual(
|
||||
[dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
|
||||
)
|
||||
# Removing a distribution leaves sequence alone
|
||||
ad.remove(ad['FooPkg'][1])
|
||||
self.assertEqual(
|
||||
[dist.version for dist in ad['FooPkg']], ['1.4','1.2']
|
||||
)
|
||||
# And inserting adds them in order
|
||||
ad.add(dist_from_fn("FooPkg-1.9.egg"))
|
||||
self.assertEqual(
|
||||
[dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
|
||||
)
|
||||
|
||||
ws = WorkingSet([])
|
||||
foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
|
||||
foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
|
||||
req, = parse_requirements("FooPkg>=1.3")
|
||||
|
||||
# Nominal case: no distros on path, should yield all applicable
|
||||
self.assertEqual(ad.best_match(req,ws).version, '1.9')
|
||||
# If a matching distro is already installed, should return only that
|
||||
ws.add(foo14)
|
||||
self.assertEqual(ad.best_match(req,ws).version, '1.4')
|
||||
|
||||
# If the first matching distro is unsuitable, it's a version conflict
|
||||
ws = WorkingSet([])
|
||||
ws.add(foo12)
|
||||
ws.add(foo14)
|
||||
self.assertRaises(VersionConflict, ad.best_match, req, ws)
|
||||
|
||||
# If more than one match on the path, the first one takes precedence
|
||||
ws = WorkingSet([])
|
||||
ws.add(foo14)
|
||||
ws.add(foo12)
|
||||
ws.add(foo14)
|
||||
self.assertEqual(ad.best_match(req,ws).version, '1.4')
|
||||
|
||||
def checkFooPkg(self,d):
|
||||
self.assertEqual(d.project_name, "FooPkg")
|
||||
self.assertEqual(d.key, "foopkg")
|
||||
self.assertEqual(d.version, "1.3-1")
|
||||
self.assertEqual(d.py_version, "2.4")
|
||||
self.assertEqual(d.platform, "win32")
|
||||
self.assertEqual(d.parsed_version, parse_version("1.3-1"))
|
||||
|
||||
def testDistroBasics(self):
|
||||
d = Distribution(
|
||||
"/some/path",
|
||||
project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
|
||||
)
|
||||
self.checkFooPkg(d)
|
||||
|
||||
d = Distribution("/some/path")
|
||||
self.assertEqual(d.py_version, sys.version[:3])
|
||||
self.assertEqual(d.platform, None)
|
||||
|
||||
def testDistroParse(self):
|
||||
d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg")
|
||||
self.checkFooPkg(d)
|
||||
d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info")
|
||||
self.checkFooPkg(d)
|
||||
|
||||
def testDistroMetadata(self):
|
||||
d = Distribution(
|
||||
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
|
||||
metadata = Metadata(
|
||||
('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
|
||||
)
|
||||
)
|
||||
self.checkFooPkg(d)
|
||||
|
||||
def distRequires(self, txt):
|
||||
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
|
||||
|
||||
def checkRequires(self, dist, txt, extras=()):
|
||||
self.assertEqual(
|
||||
list(dist.requires(extras)),
|
||||
list(parse_requirements(txt))
|
||||
)
|
||||
|
||||
def testDistroDependsSimple(self):
|
||||
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
|
||||
self.checkRequires(self.distRequires(v), v)
|
||||
|
||||
def testResolve(self):
|
||||
ad = pkg_resources.Environment([])
|
||||
ws = WorkingSet([])
|
||||
# Resolving no requirements -> nothing to install
|
||||
self.assertEqual(list(ws.resolve([],ad)), [])
|
||||
# Request something not in the collection -> DistributionNotFound
|
||||
self.assertRaises(
|
||||
pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
|
||||
)
|
||||
Foo = Distribution.from_filename(
|
||||
"/foo_dir/Foo-1.2.egg",
|
||||
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
|
||||
)
|
||||
ad.add(Foo)
|
||||
ad.add(Distribution.from_filename("Foo-0.9.egg"))
|
||||
|
||||
# Request thing(s) that are available -> list to activate
|
||||
for i in range(3):
|
||||
targets = list(ws.resolve(parse_requirements("Foo"), ad))
|
||||
self.assertEqual(targets, [Foo])
|
||||
list(map(ws.add,targets))
|
||||
self.assertRaises(VersionConflict, ws.resolve,
|
||||
parse_requirements("Foo==0.9"), ad)
|
||||
ws = WorkingSet([]) # reset
|
||||
|
||||
# Request an extra that causes an unresolved dependency for "Baz"
|
||||
self.assertRaises(
|
||||
pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
|
||||
)
|
||||
Baz = Distribution.from_filename(
|
||||
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
|
||||
)
|
||||
ad.add(Baz)
|
||||
|
||||
# Activation list now includes resolved dependency
|
||||
self.assertEqual(
|
||||
list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
|
||||
)
|
||||
# Requests for conflicting versions produce VersionConflict
|
||||
self.assertRaises(VersionConflict,
|
||||
ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
|
||||
|
||||
def testDistroDependsOptions(self):
|
||||
d = self.distRequires("""
|
||||
Twisted>=1.5
|
||||
[docgen]
|
||||
ZConfig>=2.0
|
||||
docutils>=0.3
|
||||
[fastcgi]
|
||||
fcgiapp>=0.1""")
|
||||
self.checkRequires(d,"Twisted>=1.5")
|
||||
self.checkRequires(
|
||||
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
|
||||
)
|
||||
self.checkRequires(
|
||||
d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
|
||||
)
|
||||
self.checkRequires(
|
||||
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
|
||||
["docgen","fastcgi"]
|
||||
)
|
||||
self.checkRequires(
|
||||
d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
|
||||
["fastcgi", "docgen"]
|
||||
)
|
||||
self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"])
|
||||
|
||||
|
||||
class EntryPointTests(TestCase):
|
||||
|
||||
def assertfields(self, ep):
|
||||
self.assertEqual(ep.name,"foo")
|
||||
self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
|
||||
self.assertEqual(ep.attrs, ("EntryPointTests",))
|
||||
self.assertEqual(ep.extras, ("x",))
|
||||
self.assertTrue(ep.load() is EntryPointTests)
|
||||
self.assertEqual(
|
||||
str(ep),
|
||||
"foo = setuptools.tests.test_resources:EntryPointTests [x]"
|
||||
)
|
||||
|
||||
def setUp(self):
|
||||
self.dist = Distribution.from_filename(
|
||||
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
|
||||
|
||||
def testBasics(self):
|
||||
ep = EntryPoint(
|
||||
"foo", "setuptools.tests.test_resources", ["EntryPointTests"],
|
||||
["x"], self.dist
|
||||
)
|
||||
self.assertfields(ep)
|
||||
|
||||
def testParse(self):
|
||||
s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
|
||||
ep = EntryPoint.parse(s, self.dist)
|
||||
self.assertfields(ep)
|
||||
|
||||
ep = EntryPoint.parse("bar baz= spammity[PING]")
|
||||
self.assertEqual(ep.name,"bar baz")
|
||||
self.assertEqual(ep.module_name,"spammity")
|
||||
self.assertEqual(ep.attrs, ())
|
||||
self.assertEqual(ep.extras, ("ping",))
|
||||
|
||||
ep = EntryPoint.parse(" fizzly = wocka:foo")
|
||||
self.assertEqual(ep.name,"fizzly")
|
||||
self.assertEqual(ep.module_name,"wocka")
|
||||
self.assertEqual(ep.attrs, ("foo",))
|
||||
self.assertEqual(ep.extras, ())
|
||||
|
||||
def testRejects(self):
|
||||
for ep in [
|
||||
"foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
|
||||
]:
|
||||
try: EntryPoint.parse(ep)
|
||||
except ValueError: pass
|
||||
else: raise AssertionError("Should've been bad", ep)
|
||||
|
||||
def checkSubMap(self, m):
|
||||
self.assertEqual(len(m), len(self.submap_expect))
|
||||
for key, ep in iteritems(self.submap_expect):
|
||||
self.assertEqual(repr(m.get(key)), repr(ep))
|
||||
|
||||
submap_expect = dict(
|
||||
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
|
||||
feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
|
||||
feature3=EntryPoint('feature3', 'this.module', extras=['something'])
|
||||
)
|
||||
submap_str = """
|
||||
# define features for blah blah
|
||||
feature1 = somemodule:somefunction
|
||||
feature2 = another.module:SomeClass [extra1,extra2]
|
||||
feature3 = this.module [something]
|
||||
"""
|
||||
|
||||
def testParseList(self):
|
||||
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
|
||||
self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
|
||||
self.assertRaises(ValueError, EntryPoint.parse_group, "x",
|
||||
["foo=baz", "foo=bar"])
|
||||
|
||||
def testParseMap(self):
|
||||
m = EntryPoint.parse_map({'xyz':self.submap_str})
|
||||
self.checkSubMap(m['xyz'])
|
||||
self.assertEqual(list(m.keys()),['xyz'])
|
||||
m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
|
||||
self.checkSubMap(m['xyz'])
|
||||
self.assertEqual(list(m.keys()),['xyz'])
|
||||
self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
|
||||
self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
|
||||
|
||||
class RequirementsTests(TestCase):
|
||||
|
||||
def testBasics(self):
|
||||
r = Requirement.parse("Twisted>=1.2")
|
||||
self.assertEqual(str(r),"Twisted>=1.2")
|
||||
self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
|
||||
self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
|
||||
self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
|
||||
self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
|
||||
self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
|
||||
self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
|
||||
self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
|
||||
|
||||
def testOrdering(self):
|
||||
r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
|
||||
r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
|
||||
self.assertEqual(r1,r2)
|
||||
self.assertEqual(str(r1),str(r2))
|
||||
self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
|
||||
|
||||
def testBasicContains(self):
|
||||
r = Requirement("Twisted", [('>=','1.2')], ())
|
||||
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
|
||||
twist11 = Distribution.from_filename("Twisted-1.1.egg")
|
||||
twist12 = Distribution.from_filename("Twisted-1.2.egg")
|
||||
self.assertTrue(parse_version('1.2') in r)
|
||||
self.assertTrue(parse_version('1.1') not in r)
|
||||
self.assertTrue('1.2' in r)
|
||||
self.assertTrue('1.1' not in r)
|
||||
self.assertTrue(foo_dist not in r)
|
||||
self.assertTrue(twist11 not in r)
|
||||
self.assertTrue(twist12 in r)
|
||||
|
||||
def testAdvancedContains(self):
|
||||
r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
|
||||
for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
|
||||
self.assertTrue(v in r, (v,r))
|
||||
for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
|
||||
self.assertTrue(v not in r, (v,r))
|
||||
|
||||
def testOptionsAndHashing(self):
|
||||
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
|
||||
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
|
||||
r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
|
||||
self.assertEqual(r1,r2)
|
||||
self.assertEqual(r1,r3)
|
||||
self.assertEqual(r1.extras, ("foo","bar"))
|
||||
self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
|
||||
self.assertEqual(hash(r1), hash(r2))
|
||||
self.assertEqual(
|
||||
hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
|
||||
frozenset(["foo","bar"])))
|
||||
)
|
||||
|
||||
def testVersionEquality(self):
|
||||
r1 = Requirement.parse("foo==0.3a2")
|
||||
r2 = Requirement.parse("foo!=0.3a4")
|
||||
d = Distribution.from_filename
|
||||
|
||||
self.assertTrue(d("foo-0.3a4.egg") not in r1)
|
||||
self.assertTrue(d("foo-0.3a1.egg") not in r1)
|
||||
self.assertTrue(d("foo-0.3a4.egg") not in r2)
|
||||
|
||||
self.assertTrue(d("foo-0.3a2.egg") in r1)
|
||||
self.assertTrue(d("foo-0.3a2.egg") in r2)
|
||||
self.assertTrue(d("foo-0.3a3.egg") in r2)
|
||||
self.assertTrue(d("foo-0.3a5.egg") in r2)
|
||||
|
||||
def testSetuptoolsProjectName(self):
|
||||
"""
|
||||
The setuptools project should implement the setuptools package.
|
||||
"""
|
||||
|
||||
self.assertEqual(
|
||||
Requirement.parse('setuptools').project_name, 'setuptools')
|
||||
# setuptools 0.7 and higher means setuptools.
|
||||
self.assertEqual(
|
||||
Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
|
||||
self.assertEqual(
|
||||
Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
|
||||
self.assertEqual(
|
||||
Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
|
||||
|
||||
|
||||
class ParseTests(TestCase):
|
||||
|
||||
def testEmptyParse(self):
|
||||
self.assertEqual(list(parse_requirements('')), [])
|
||||
|
||||
def testYielding(self):
|
||||
for inp,out in [
|
||||
([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
|
||||
(['x\n\n','y'], ['x','y']),
|
||||
]:
|
||||
self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
|
||||
|
||||
def testSplitting(self):
|
||||
sample = """
|
||||
x
|
||||
[Y]
|
||||
z
|
||||
|
||||
a
|
||||
[b ]
|
||||
# foo
|
||||
c
|
||||
[ d]
|
||||
[q]
|
||||
v
|
||||
"""
|
||||
self.assertEqual(list(pkg_resources.split_sections(sample)),
|
||||
[(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
|
||||
)
|
||||
self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
|
||||
|
||||
def testSafeName(self):
|
||||
self.assertEqual(safe_name("adns-python"), "adns-python")
|
||||
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
|
||||
self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
|
||||
self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
|
||||
self.assertNotEqual(safe_name("peak.web"), "peak-web")
|
||||
|
||||
def testSafeVersion(self):
|
||||
self.assertEqual(safe_version("1.2-1"), "1.2-1")
|
||||
self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
|
||||
self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
|
||||
self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
|
||||
self.assertEqual(safe_version("peak.web"), "peak.web")
|
||||
|
||||
def testSimpleRequirements(self):
|
||||
self.assertEqual(
|
||||
list(parse_requirements('Twis-Ted>=1.2-1')),
|
||||
[Requirement('Twis-Ted',[('>=','1.2-1')], ())]
|
||||
)
|
||||
self.assertEqual(
|
||||
list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
|
||||
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
|
||||
)
|
||||
self.assertEqual(
|
||||
Requirement.parse("FooBar==1.99a3"),
|
||||
Requirement("FooBar", [('==','1.99a3')], ())
|
||||
)
|
||||
self.assertRaises(ValueError,Requirement.parse,">=2.3")
|
||||
self.assertRaises(ValueError,Requirement.parse,"x\\")
|
||||
self.assertRaises(ValueError,Requirement.parse,"x==2 q")
|
||||
self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
|
||||
self.assertRaises(ValueError,Requirement.parse,"#")
|
||||
|
||||
def testVersionEquality(self):
|
||||
def c(s1,s2):
|
||||
p1, p2 = parse_version(s1),parse_version(s2)
|
||||
self.assertEqual(p1,p2, (s1,s2,p1,p2))
|
||||
|
||||
c('1.2-rc1', '1.2rc1')
|
||||
c('0.4', '0.4.0')
|
||||
c('0.4.0.0', '0.4.0')
|
||||
c('0.4.0-0', '0.4-0')
|
||||
c('0pl1', '0.0pl1')
|
||||
c('0pre1', '0.0c1')
|
||||
c('0.0.0preview1', '0c1')
|
||||
c('0.0c1', '0-rc1')
|
||||
c('1.2a1', '1.2.a.1')
|
||||
c('1.2...a', '1.2a')
|
||||
|
||||
def testVersionOrdering(self):
|
||||
def c(s1,s2):
|
||||
p1, p2 = parse_version(s1),parse_version(s2)
|
||||
self.assertTrue(p1<p2, (s1,s2,p1,p2))
|
||||
|
||||
c('2.1','2.1.1')
|
||||
c('2a1','2b0')
|
||||
c('2a1','2.1')
|
||||
c('2.3a1', '2.3')
|
||||
c('2.1-1', '2.1-2')
|
||||
c('2.1-1', '2.1.1')
|
||||
c('2.1', '2.1pl4')
|
||||
c('2.1a0-20040501', '2.1')
|
||||
c('1.1', '02.1')
|
||||
c('A56','B27')
|
||||
c('3.2', '3.2.pl0')
|
||||
c('3.2-1', '3.2pl1')
|
||||
c('3.2pl1', '3.2pl1-1')
|
||||
c('0.4', '4.0')
|
||||
c('0.0.4', '0.4.0')
|
||||
c('0pl1', '0.4pl1')
|
||||
c('2.1.0-rc1','2.1.0')
|
||||
c('2.1dev','2.1a0')
|
||||
|
||||
torture ="""
|
||||
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
|
||||
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
|
||||
0.77.2-1 0.77.1-1 0.77.0-1
|
||||
""".split()
|
||||
|
||||
for p,v1 in enumerate(torture):
|
||||
for v2 in torture[p+1:]:
|
||||
c(v2,v1)
|
||||
|
||||
|
||||
class ScriptHeaderTests(TestCase):
|
||||
non_ascii_exe = '/Users/José/bin/python'
|
||||
exe_with_spaces = r'C:\Program Files\Python33\python.exe'
|
||||
|
||||
def test_get_script_header(self):
|
||||
if not sys.platform.startswith('java') or not is_sh(sys.executable):
|
||||
# This test is for non-Jython platforms
|
||||
expected = '#!%s\n' % nt_quote_arg(os.path.normpath(sys.executable))
|
||||
self.assertEqual(get_script_header('#!/usr/local/bin/python'),
|
||||
expected)
|
||||
expected = '#!%s -x\n' % nt_quote_arg(os.path.normpath(sys.executable))
|
||||
self.assertEqual(get_script_header('#!/usr/bin/python -x'),
|
||||
expected)
|
||||
self.assertEqual(get_script_header('#!/usr/bin/python',
|
||||
executable=self.non_ascii_exe),
|
||||
'#!%s -x\n' % self.non_ascii_exe)
|
||||
candidate = get_script_header('#!/usr/bin/python',
|
||||
executable=self.exe_with_spaces)
|
||||
self.assertEqual(candidate, '#!"%s"\n' % self.exe_with_spaces)
|
||||
|
||||
def test_get_script_header_jython_workaround(self):
|
||||
# This test doesn't work with Python 3 in some locales
|
||||
if PY3 and os.environ.get("LC_CTYPE") in (None, "C", "POSIX"):
|
||||
return
|
||||
|
||||
class java:
|
||||
class lang:
|
||||
class System:
|
||||
@staticmethod
|
||||
def getProperty(property):
|
||||
return ""
|
||||
sys.modules["java"] = java
|
||||
|
||||
platform = sys.platform
|
||||
sys.platform = 'java1.5.0_13'
|
||||
stdout, stderr = sys.stdout, sys.stderr
|
||||
try:
|
||||
# A mock sys.executable that uses a shebang line (this file)
|
||||
exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
|
||||
self.assertEqual(
|
||||
get_script_header('#!/usr/local/bin/python', executable=exe),
|
||||
'#!/usr/bin/env %s\n' % exe)
|
||||
|
||||
# Ensure we generate what is basically a broken shebang line
|
||||
# when there's options, with a warning emitted
|
||||
sys.stdout = sys.stderr = StringIO()
|
||||
self.assertEqual(get_script_header('#!/usr/bin/python -x',
|
||||
executable=exe),
|
||||
'#!%s -x\n' % exe)
|
||||
self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
|
||||
sys.stdout = sys.stderr = StringIO()
|
||||
self.assertEqual(get_script_header('#!/usr/bin/python',
|
||||
executable=self.non_ascii_exe),
|
||||
'#!%s -x\n' % self.non_ascii_exe)
|
||||
self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
|
||||
finally:
|
||||
del sys.modules["java"]
|
||||
sys.platform = platform
|
||||
sys.stdout, sys.stderr = stdout, stderr
|
||||
|
||||
|
||||
class NamespaceTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self._ns_pkgs = pkg_resources._namespace_packages.copy()
|
||||
self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-")
|
||||
os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
|
||||
self._prev_sys_path = sys.path[:]
|
||||
sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self._tmpdir)
|
||||
pkg_resources._namespace_packages = self._ns_pkgs.copy()
|
||||
sys.path = self._prev_sys_path[:]
|
||||
|
||||
msg = "Test fails when /tmp is a symlink. See #231"
|
||||
@skipIf(os.path.islink(tempfile.gettempdir()), msg)
|
||||
def test_two_levels_deep(self):
|
||||
"""
|
||||
Test nested namespace packages
|
||||
Create namespace packages in the following tree :
|
||||
site-packages-1/pkg1/pkg2
|
||||
site-packages-2/pkg1/pkg2
|
||||
Check both are in the _namespace_packages dict and that their __path__
|
||||
is correct
|
||||
"""
|
||||
sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
|
||||
os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
|
||||
os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
|
||||
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
|
||||
for site in ["site-pkgs", "site-pkgs2"]:
|
||||
pkg1_init = open(os.path.join(self._tmpdir, site,
|
||||
"pkg1", "__init__.py"), "w")
|
||||
pkg1_init.write(ns_str)
|
||||
pkg1_init.close()
|
||||
pkg2_init = open(os.path.join(self._tmpdir, site,
|
||||
"pkg1", "pkg2", "__init__.py"), "w")
|
||||
pkg2_init.write(ns_str)
|
||||
pkg2_init.close()
|
||||
import pkg1
|
||||
assert "pkg1" in pkg_resources._namespace_packages
|
||||
try:
|
||||
import pkg1.pkg2
|
||||
except ImportError:
|
||||
self.fail("Setuptools tried to import the parent namespace package")
|
||||
# check the _namespace_packages dict
|
||||
assert "pkg1.pkg2" in pkg_resources._namespace_packages
|
||||
assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
|
||||
# check the __path__ attribute contains both paths
|
||||
expected = [
|
||||
os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
|
||||
os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"),
|
||||
]
|
||||
assert pkg1.pkg2.__path__ == expected
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
"""develop tests
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
import tempfile
|
||||
import types
|
||||
|
||||
import pkg_resources
|
||||
import setuptools.sandbox
|
||||
from setuptools.sandbox import DirectorySandbox, SandboxViolation
|
||||
|
||||
def has_win32com():
|
||||
"""
|
||||
Run this to determine if the local machine has win32com, and if it
|
||||
does, include additional tests.
|
||||
"""
|
||||
if not sys.platform.startswith('win32'):
|
||||
return False
|
||||
try:
|
||||
__import__('win32com')
|
||||
except ImportError:
|
||||
return False
|
||||
return True
|
||||
|
||||
class TestSandbox(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dir = tempfile.mkdtemp()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.dir)
|
||||
|
||||
def test_devnull(self):
|
||||
sandbox = DirectorySandbox(self.dir)
|
||||
sandbox.run(self._file_writer(os.devnull))
|
||||
|
||||
def _file_writer(path):
|
||||
def do_write():
|
||||
f = open(path, 'w')
|
||||
f.write('xxx')
|
||||
f.close()
|
||||
return do_write
|
||||
|
||||
_file_writer = staticmethod(_file_writer)
|
||||
|
||||
if has_win32com():
|
||||
def test_win32com(self):
|
||||
"""
|
||||
win32com should not be prevented from caching COM interfaces
|
||||
in gen_py.
|
||||
"""
|
||||
import win32com
|
||||
gen_py = win32com.__gen_path__
|
||||
target = os.path.join(gen_py, 'test_write')
|
||||
sandbox = DirectorySandbox(self.dir)
|
||||
try:
|
||||
try:
|
||||
sandbox.run(self._file_writer(target))
|
||||
except SandboxViolation:
|
||||
self.fail("Could not create gen_py file due to SandboxViolation")
|
||||
finally:
|
||||
if os.path.exists(target): os.remove(target)
|
||||
|
||||
def test_setup_py_with_BOM(self):
|
||||
"""
|
||||
It should be possible to execute a setup.py with a Byte Order Mark
|
||||
"""
|
||||
target = pkg_resources.resource_filename(__name__,
|
||||
'script-with-bom.py')
|
||||
namespace = types.ModuleType('namespace')
|
||||
setuptools.sandbox._execfile(target, vars(namespace))
|
||||
assert namespace.result == 'passed'
|
||||
|
||||
def test_setup_py_with_CRLF(self):
|
||||
setup_py = os.path.join(self.dir, 'setup.py')
|
||||
with open(setup_py, 'wb') as stream:
|
||||
stream.write(b'"degenerate script"\r\n')
|
||||
setuptools.sandbox._execfile(setup_py, globals())
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
@ -1,552 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""sdist tests"""
|
||||
|
||||
import locale
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
import unicodedata
|
||||
import re
|
||||
import contextlib
|
||||
from setuptools.tests import environment, test_svn
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
from setuptools.compat import StringIO, unicode, PY3, PY2
|
||||
from setuptools.command.sdist import sdist, walk_revctrl
|
||||
from setuptools.command.egg_info import manifest_maker
|
||||
from setuptools.dist import Distribution
|
||||
from setuptools import svn_utils
|
||||
|
||||
SETUP_ATTRS = {
|
||||
'name': 'sdist_test',
|
||||
'version': '0.0',
|
||||
'packages': ['sdist_test'],
|
||||
'package_data': {'sdist_test': ['*.txt']}
|
||||
}
|
||||
|
||||
|
||||
SETUP_PY = """\
|
||||
from setuptools import setup
|
||||
|
||||
setup(**%r)
|
||||
""" % SETUP_ATTRS
|
||||
|
||||
|
||||
if PY3:
|
||||
LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
|
||||
else:
|
||||
LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
|
||||
|
||||
|
||||
# Cannot use context manager because of Python 2.4
|
||||
@contextlib.contextmanager
|
||||
def quiet():
|
||||
old_stdout, old_stderr = sys.stdout, sys.stderr
|
||||
sys.stdout, sys.stderr = StringIO(), StringIO()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
sys.stdout, sys.stderr = old_stdout, old_stderr
|
||||
|
||||
|
||||
# Fake byte literals for Python <= 2.5
|
||||
def b(s, encoding='utf-8'):
|
||||
if PY3:
|
||||
return s.encode(encoding)
|
||||
return s
|
||||
|
||||
|
||||
# Convert to POSIX path
|
||||
def posix(path):
|
||||
if PY3 and not isinstance(path, str):
|
||||
return path.replace(os.sep.encode('ascii'), b('/'))
|
||||
else:
|
||||
return path.replace(os.sep, '/')
|
||||
|
||||
|
||||
# HFS Plus uses decomposed UTF-8
|
||||
def decompose(path):
|
||||
if isinstance(path, unicode):
|
||||
return unicodedata.normalize('NFD', path)
|
||||
try:
|
||||
path = path.decode('utf-8')
|
||||
path = unicodedata.normalize('NFD', path)
|
||||
path = path.encode('utf-8')
|
||||
except UnicodeError:
|
||||
pass # Not UTF-8
|
||||
return path
|
||||
|
||||
|
||||
class TestSdistTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.temp_dir = tempfile.mkdtemp()
|
||||
f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
|
||||
f.write(SETUP_PY)
|
||||
f.close()
|
||||
|
||||
# Set up the rest of the test package
|
||||
test_pkg = os.path.join(self.temp_dir, 'sdist_test')
|
||||
os.mkdir(test_pkg)
|
||||
# *.rst was not included in package_data, so c.rst should not be
|
||||
# automatically added to the manifest when not under version control
|
||||
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
|
||||
# Just touch the files; their contents are irrelevant
|
||||
open(os.path.join(test_pkg, fname), 'w').close()
|
||||
|
||||
self.old_cwd = os.getcwd()
|
||||
os.chdir(self.temp_dir)
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(self.old_cwd)
|
||||
shutil.rmtree(self.temp_dir)
|
||||
|
||||
def test_package_data_in_sdist(self):
|
||||
"""Regression test for pull request #4: ensures that files listed in
|
||||
package_data are included in the manifest even if they're not added to
|
||||
version control.
|
||||
"""
|
||||
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
manifest = cmd.filelist.files
|
||||
self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
|
||||
self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
|
||||
self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
|
||||
|
||||
|
||||
def test_defaults_case_sensitivity(self):
|
||||
"""
|
||||
Make sure default files (README.*, etc.) are added in a case-sensitive
|
||||
way to avoid problems with packages built on Windows.
|
||||
"""
|
||||
|
||||
open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
|
||||
open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
|
||||
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
# the extension deliberately capitalized for this test
|
||||
# to make sure the actual filename (not capitalized) gets added
|
||||
# to the manifest
|
||||
dist.script_name = 'setup.PY'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
# lowercase all names so we can test in a case-insensitive way to make sure the files are not included
|
||||
manifest = map(lambda x: x.lower(), cmd.filelist.files)
|
||||
self.assertFalse('readme.rst' in manifest, manifest)
|
||||
self.assertFalse('setup.py' in manifest, manifest)
|
||||
self.assertFalse('setup.cfg' in manifest, manifest)
|
||||
|
||||
def test_manifest_is_written_with_utf8_encoding(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
mm = manifest_maker(dist)
|
||||
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
|
||||
os.mkdir('sdist_test.egg-info')
|
||||
|
||||
# UTF-8 filename
|
||||
filename = os.path.join('sdist_test', 'smörbröd.py')
|
||||
|
||||
# Must create the file or it will get stripped.
|
||||
open(filename, 'w').close()
|
||||
|
||||
# Add UTF-8 filename and write manifest
|
||||
with quiet():
|
||||
mm.run()
|
||||
mm.filelist.append(filename)
|
||||
mm.write_manifest()
|
||||
|
||||
manifest = open(mm.manifest, 'rbU')
|
||||
contents = manifest.read()
|
||||
manifest.close()
|
||||
|
||||
# The manifest should be UTF-8 encoded
|
||||
try:
|
||||
u_contents = contents.decode('UTF-8')
|
||||
except UnicodeDecodeError:
|
||||
e = sys.exc_info()[1]
|
||||
self.fail(e)
|
||||
|
||||
# The manifest should contain the UTF-8 filename
|
||||
if PY2:
|
||||
fs_enc = sys.getfilesystemencoding()
|
||||
filename = filename.decode(fs_enc)
|
||||
|
||||
self.assertTrue(posix(filename) in u_contents)
|
||||
|
||||
# Python 3 only
|
||||
if PY3:
|
||||
|
||||
def test_write_manifest_allows_utf8_filenames(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
mm = manifest_maker(dist)
|
||||
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
|
||||
os.mkdir('sdist_test.egg-info')
|
||||
|
||||
# UTF-8 filename
|
||||
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
|
||||
|
||||
# Must touch the file or risk removal
|
||||
open(filename, "w").close()
|
||||
|
||||
# Add filename and write manifest
|
||||
with quiet():
|
||||
mm.run()
|
||||
u_filename = filename.decode('utf-8')
|
||||
mm.filelist.files.append(u_filename)
|
||||
# Re-write manifest
|
||||
mm.write_manifest()
|
||||
|
||||
manifest = open(mm.manifest, 'rbU')
|
||||
contents = manifest.read()
|
||||
manifest.close()
|
||||
|
||||
# The manifest should be UTF-8 encoded
|
||||
try:
|
||||
contents.decode('UTF-8')
|
||||
except UnicodeDecodeError:
|
||||
e = sys.exc_info()[1]
|
||||
self.fail(e)
|
||||
|
||||
# The manifest should contain the UTF-8 filename
|
||||
self.assertTrue(posix(filename) in contents)
|
||||
|
||||
# The filelist should have been updated as well
|
||||
self.assertTrue(u_filename in mm.filelist.files)
|
||||
|
||||
def test_write_manifest_skips_non_utf8_filenames(self):
|
||||
"""
|
||||
Files that cannot be encoded to UTF-8 (specifically, those that
|
||||
weren't originally successfully decoded and have surrogate
|
||||
escapes) should be omitted from the manifest.
|
||||
See https://bitbucket.org/tarek/distribute/issue/303 for history.
|
||||
"""
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
mm = manifest_maker(dist)
|
||||
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
|
||||
os.mkdir('sdist_test.egg-info')
|
||||
|
||||
# Latin-1 filename
|
||||
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
|
||||
|
||||
# Add filename with surrogates and write manifest
|
||||
with quiet():
|
||||
mm.run()
|
||||
u_filename = filename.decode('utf-8', 'surrogateescape')
|
||||
mm.filelist.append(u_filename)
|
||||
# Re-write manifest
|
||||
mm.write_manifest()
|
||||
|
||||
manifest = open(mm.manifest, 'rbU')
|
||||
contents = manifest.read()
|
||||
manifest.close()
|
||||
|
||||
# The manifest should be UTF-8 encoded
|
||||
try:
|
||||
contents.decode('UTF-8')
|
||||
except UnicodeDecodeError:
|
||||
e = sys.exc_info()[1]
|
||||
self.fail(e)
|
||||
|
||||
# The Latin-1 filename should have been skipped
|
||||
self.assertFalse(posix(filename) in contents)
|
||||
|
||||
# The filelist should have been updated as well
|
||||
self.assertFalse(u_filename in mm.filelist.files)
|
||||
|
||||
def test_manifest_is_read_with_utf8_encoding(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
# Create manifest
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
# Add UTF-8 filename to manifest
|
||||
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
|
||||
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
|
||||
manifest = open(cmd.manifest, 'ab')
|
||||
manifest.write(b('\n') + filename)
|
||||
manifest.close()
|
||||
|
||||
# The file must exist to be included in the filelist
|
||||
open(filename, 'w').close()
|
||||
|
||||
# Re-read manifest
|
||||
cmd.filelist.files = []
|
||||
with quiet():
|
||||
cmd.read_manifest()
|
||||
|
||||
# The filelist should contain the UTF-8 filename
|
||||
if PY3:
|
||||
filename = filename.decode('utf-8')
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
|
||||
# Python 3 only
|
||||
if PY3:
|
||||
|
||||
def test_read_manifest_skips_non_utf8_filenames(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
# Create manifest
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
# Add Latin-1 filename to manifest
|
||||
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
|
||||
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
|
||||
manifest = open(cmd.manifest, 'ab')
|
||||
manifest.write(b('\n') + filename)
|
||||
manifest.close()
|
||||
|
||||
# The file must exist to be included in the filelist
|
||||
open(filename, 'w').close()
|
||||
|
||||
# Re-read manifest
|
||||
cmd.filelist.files = []
|
||||
with quiet():
|
||||
try:
|
||||
cmd.read_manifest()
|
||||
except UnicodeDecodeError:
|
||||
e = sys.exc_info()[1]
|
||||
self.fail(e)
|
||||
|
||||
# The Latin-1 filename should have been skipped
|
||||
filename = filename.decode('latin-1')
|
||||
self.assertFalse(filename in cmd.filelist.files)
|
||||
|
||||
@skipIf(PY3 and locale.getpreferredencoding() != 'UTF-8',
|
||||
'Unittest fails if locale is not utf-8 but the manifests is recorded correctly')
|
||||
def test_sdist_with_utf8_encoded_filename(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
# UTF-8 filename
|
||||
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
|
||||
open(filename, 'w').close()
|
||||
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
filename = decompose(filename)
|
||||
|
||||
if PY3:
|
||||
fs_enc = sys.getfilesystemencoding()
|
||||
|
||||
if sys.platform == 'win32':
|
||||
if fs_enc == 'cp1252':
|
||||
# Python 3 mangles the UTF-8 filename
|
||||
filename = filename.decode('cp1252')
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
else:
|
||||
filename = filename.decode('mbcs')
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
else:
|
||||
filename = filename.decode('utf-8')
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
else:
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
|
||||
def test_sdist_with_latin1_encoded_filename(self):
|
||||
# Test for #303.
|
||||
dist = Distribution(SETUP_ATTRS)
|
||||
dist.script_name = 'setup.py'
|
||||
cmd = sdist(dist)
|
||||
cmd.ensure_finalized()
|
||||
|
||||
# Latin-1 filename
|
||||
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
|
||||
open(filename, 'w').close()
|
||||
self.assertTrue(os.path.isfile(filename))
|
||||
|
||||
with quiet():
|
||||
cmd.run()
|
||||
|
||||
if PY3:
|
||||
# not all windows systems have a default FS encoding of cp1252
|
||||
if sys.platform == 'win32':
|
||||
# Latin-1 is similar to Windows-1252 however
|
||||
# on mbcs filesys it is not in latin-1 encoding
|
||||
fs_enc = sys.getfilesystemencoding()
|
||||
if fs_enc == 'mbcs':
|
||||
filename = filename.decode('mbcs')
|
||||
else:
|
||||
filename = filename.decode('latin-1')
|
||||
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
else:
|
||||
# The Latin-1 filename should have been skipped
|
||||
filename = filename.decode('latin-1')
|
||||
self.assertFalse(filename in cmd.filelist.files)
|
||||
else:
|
||||
# Under Python 2 there seems to be no decoded string in the
|
||||
# filelist. However, due to decode and encoding of the
|
||||
# file name to get utf-8 Manifest the latin1 maybe excluded
|
||||
try:
|
||||
# fs_enc should match how one is expect the decoding to
|
||||
# be proformed for the manifest output.
|
||||
fs_enc = sys.getfilesystemencoding()
|
||||
filename.decode(fs_enc)
|
||||
self.assertTrue(filename in cmd.filelist.files)
|
||||
except UnicodeDecodeError:
|
||||
self.assertFalse(filename in cmd.filelist.files)
|
||||
|
||||
class TestDummyOutput(environment.ZippedEnvironment):
|
||||
|
||||
def setUp(self):
|
||||
self.datafile = os.path.join('setuptools', 'tests',
|
||||
'svn_data', "dummy.zip")
|
||||
self.dataname = "dummy"
|
||||
super(TestDummyOutput, self).setUp()
|
||||
|
||||
def _run(self):
|
||||
code, data = environment.run_setup_py(["sdist"],
|
||||
pypath=self.old_cwd,
|
||||
data_stream=0)
|
||||
if code:
|
||||
info = "DIR: " + os.path.abspath('.')
|
||||
info += "\n SDIST RETURNED: %i\n\n" % code
|
||||
info += data
|
||||
raise AssertionError(info)
|
||||
|
||||
datalines = data.splitlines()
|
||||
|
||||
possible = (
|
||||
"running sdist",
|
||||
"running egg_info",
|
||||
"creating dummy\.egg-info",
|
||||
"writing dummy\.egg-info",
|
||||
"writing top-level names to dummy\.egg-info",
|
||||
"writing dependency_links to dummy\.egg-info",
|
||||
"writing manifest file 'dummy\.egg-info",
|
||||
"reading manifest file 'dummy\.egg-info",
|
||||
"reading manifest template 'MANIFEST\.in'",
|
||||
"writing manifest file 'dummy\.egg-info",
|
||||
"creating dummy-0.1.1",
|
||||
"making hard links in dummy-0\.1\.1",
|
||||
"copying files to dummy-0\.1\.1",
|
||||
"copying \S+ -> dummy-0\.1\.1",
|
||||
"copying dummy",
|
||||
"copying dummy\.egg-info",
|
||||
"hard linking \S+ -> dummy-0\.1\.1",
|
||||
"hard linking dummy",
|
||||
"hard linking dummy\.egg-info",
|
||||
"Writing dummy-0\.1\.1",
|
||||
"creating dist",
|
||||
"creating 'dist",
|
||||
"Creating tar archive",
|
||||
"running check",
|
||||
"adding 'dummy-0\.1\.1",
|
||||
"tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1",
|
||||
"gzip .+ dist/dummy-0\.1\.1\.tar",
|
||||
"removing 'dummy-0\.1\.1' \\(and everything under it\\)",
|
||||
)
|
||||
|
||||
print(" DIR: " + os.path.abspath('.'))
|
||||
for line in datalines:
|
||||
found = False
|
||||
for pattern in possible:
|
||||
if re.match(pattern, line):
|
||||
print(" READ: " + line)
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
raise AssertionError("Unexpexected: %s\n-in-\n%s"
|
||||
% (line, data))
|
||||
|
||||
return data
|
||||
|
||||
def test_sources(self):
|
||||
self._run()
|
||||
|
||||
|
||||
class TestSvn(environment.ZippedEnvironment):
|
||||
|
||||
def setUp(self):
|
||||
version = svn_utils.SvnInfo.get_svn_version()
|
||||
if not version: # None or Empty
|
||||
return
|
||||
|
||||
self.base_version = tuple([int(x) for x in version.split('.')][:2])
|
||||
|
||||
if not self.base_version:
|
||||
raise ValueError('No SVN tools installed')
|
||||
elif self.base_version < (1, 3):
|
||||
raise ValueError('Insufficient SVN Version %s' % version)
|
||||
elif self.base_version >= (1, 9):
|
||||
# trying the latest version
|
||||
self.base_version = (1, 8)
|
||||
|
||||
self.dataname = "svn%i%i_example" % self.base_version
|
||||
self.datafile = os.path.join('setuptools', 'tests',
|
||||
'svn_data', self.dataname + ".zip")
|
||||
super(TestSvn, self).setUp()
|
||||
|
||||
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
|
||||
def test_walksvn(self):
|
||||
if self.base_version >= (1, 6):
|
||||
folder2 = 'third party2'
|
||||
folder3 = 'third party3'
|
||||
else:
|
||||
folder2 = 'third_party2'
|
||||
folder3 = 'third_party3'
|
||||
|
||||
# TODO is this right
|
||||
expected = set([
|
||||
os.path.join('a file'),
|
||||
os.path.join(folder2, 'Changes.txt'),
|
||||
os.path.join(folder2, 'MD5SUMS'),
|
||||
os.path.join(folder2, 'README.txt'),
|
||||
os.path.join(folder3, 'Changes.txt'),
|
||||
os.path.join(folder3, 'MD5SUMS'),
|
||||
os.path.join(folder3, 'README.txt'),
|
||||
os.path.join(folder3, 'TODO.txt'),
|
||||
os.path.join(folder3, 'fin'),
|
||||
os.path.join('third_party', 'README.txt'),
|
||||
os.path.join('folder', folder2, 'Changes.txt'),
|
||||
os.path.join('folder', folder2, 'MD5SUMS'),
|
||||
os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'),
|
||||
os.path.join('folder', folder3, 'Changes.txt'),
|
||||
os.path.join('folder', folder3, 'fin'),
|
||||
os.path.join('folder', folder3, 'MD5SUMS'),
|
||||
os.path.join('folder', folder3, 'oops'),
|
||||
os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'),
|
||||
os.path.join('folder', folder3, 'ZuMachen.txt'),
|
||||
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'),
|
||||
os.path.join('folder', 'lalala.txt'),
|
||||
os.path.join('folder', 'quest.txt'),
|
||||
# The example will have a deleted file
|
||||
# (or should) but shouldn't return it
|
||||
])
|
||||
self.assertEqual(set(x for x in walk_revctrl()), expected)
|
||||
|
||||
|
||||
def test_suite():
|
||||
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
||||
|
|
@ -1,245 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""svn tests"""
|
||||
|
||||
import io
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import unittest
|
||||
from setuptools.tests import environment
|
||||
from setuptools.compat import unicode, unichr
|
||||
|
||||
from setuptools import svn_utils
|
||||
from setuptools.tests.py26compat import skipIf
|
||||
|
||||
|
||||
def _do_svn_check():
|
||||
try:
|
||||
subprocess.check_call(["svn", "--version"],
|
||||
shell=(sys.platform == 'win32'))
|
||||
return True
|
||||
except (OSError, subprocess.CalledProcessError):
|
||||
return False
|
||||
_svn_check = _do_svn_check()
|
||||
|
||||
|
||||
class TestSvnVersion(unittest.TestCase):
|
||||
|
||||
def test_no_svn_found(self):
|
||||
path_variable = None
|
||||
for env in os.environ:
|
||||
if env.lower() == 'path':
|
||||
path_variable = env
|
||||
|
||||
if path_variable is None:
|
||||
try:
|
||||
self.skipTest('Cannot figure out how to modify path')
|
||||
except AttributeError: # PY26 doesn't have this
|
||||
return
|
||||
|
||||
old_path = os.environ[path_variable]
|
||||
os.environ[path_variable] = ''
|
||||
try:
|
||||
version = svn_utils.SvnInfo.get_svn_version()
|
||||
self.assertEqual(version, '')
|
||||
finally:
|
||||
os.environ[path_variable] = old_path
|
||||
|
||||
@skipIf(not _svn_check, "No SVN to text, in the first place")
|
||||
def test_svn_should_exist(self):
|
||||
version = svn_utils.SvnInfo.get_svn_version()
|
||||
self.assertNotEqual(version, '')
|
||||
|
||||
def _read_utf8_file(path):
|
||||
fileobj = None
|
||||
try:
|
||||
fileobj = io.open(path, 'r', encoding='utf-8')
|
||||
data = fileobj.read()
|
||||
return data
|
||||
finally:
|
||||
if fileobj:
|
||||
fileobj.close()
|
||||
|
||||
|
||||
class ParserInfoXML(unittest.TestCase):
|
||||
|
||||
def parse_tester(self, svn_name, ext_spaces):
|
||||
path = os.path.join('setuptools', 'tests',
|
||||
'svn_data', svn_name + '_info.xml')
|
||||
#Remember these are pre-generated to test XML parsing
|
||||
# so these paths might not valid on your system
|
||||
example_base = "%s_example" % svn_name
|
||||
|
||||
data = _read_utf8_file(path)
|
||||
|
||||
expected = set([
|
||||
("\\".join((example_base, 'a file')), 'file'),
|
||||
("\\".join((example_base, 'folder')), 'dir'),
|
||||
("\\".join((example_base, 'folder', 'lalala.txt')), 'file'),
|
||||
("\\".join((example_base, 'folder', 'quest.txt')), 'file'),
|
||||
])
|
||||
self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)),
|
||||
expected)
|
||||
|
||||
def test_svn13(self):
|
||||
self.parse_tester('svn13', False)
|
||||
|
||||
def test_svn14(self):
|
||||
self.parse_tester('svn14', False)
|
||||
|
||||
def test_svn15(self):
|
||||
self.parse_tester('svn15', False)
|
||||
|
||||
def test_svn16(self):
|
||||
self.parse_tester('svn16', True)
|
||||
|
||||
def test_svn17(self):
|
||||
self.parse_tester('svn17', True)
|
||||
|
||||
def test_svn18(self):
|
||||
self.parse_tester('svn18', True)
|
||||
|
||||
class ParserExternalXML(unittest.TestCase):
    """Validate svn_utils.parse_externals_xml against per-version fixtures."""

    def parse_tester(self, svn_name, ext_spaces):
        """
        Parse the ``<svn_name>_ext_list.xml`` fixture and compare the
        reported externals against the expected relative paths.

        ``ext_spaces`` selects fixture folder names that contain spaces
        (used by the newer svn layouts).
        """
        path = os.path.join('setuptools', 'tests',
                            'svn_data', svn_name + '_ext_list.xml')
        example_base = svn_name + '_example'
        data = _read_utf8_file(path)

        if ext_spaces:
            folder2 = 'third party2'
            folder3 = 'third party3'
        else:
            folder2 = 'third_party2'
            folder3 = 'third_party3'

        # Build the non-ASCII folder name ("third_party" + U+5927 U+4ECB,
        # i.e. third_party大介) once instead of repeating the construction.
        third_party_cjk = (unicode('third_party') +
                           unichr(0x5927) + unichr(0x4ecb))

        expected = set([
            os.sep.join((example_base, folder2)),
            os.sep.join((example_base, folder3)),
            os.sep.join((example_base, third_party_cjk)),
            os.sep.join((example_base, 'folder', folder2)),
            os.sep.join((example_base, 'folder', folder3)),
            os.sep.join((example_base, 'folder', third_party_cjk)),
        ])
        expected = set(os.path.normpath(x) for x in expected)

        dir_base = os.sep.join(('C:', 'development', 'svn_example'))
        # parse_externals_xml already yields the paths; no pass-through
        # generator expression is needed around it.
        self.assertEqual(set(svn_utils.parse_externals_xml(data, dir_base)),
                         expected)

    def test_svn15(self):
        self.parse_tester('svn15', False)

    def test_svn16(self):
        self.parse_tester('svn16', True)

    def test_svn17(self):
        self.parse_tester('svn17', True)

    def test_svn18(self):
        self.parse_tester('svn18', True)
|
||||
|
||||
|
||||
class ParseExternal(unittest.TestCase):
    """Validate svn_utils.parse_external_prop against per-version fixtures."""

    def parse_tester(self, svn_name, ext_spaces):
        """
        Parse the ``<svn_name>_ext_list.txt`` fixture and compare the
        external folder names with the expected set.

        ``ext_spaces`` selects fixture folder names that contain spaces.
        """
        path = os.path.join('setuptools', 'tests',
                            'svn_data', svn_name + '_ext_list.txt')
        data = _read_utf8_file(path)

        if ext_spaces:
            expected = set(['third party2', 'third party3',
                            'third party3b', 'third_party'])
        else:
            expected = set(['third_party2', 'third_party3', 'third_party'])

        # parse_external_prop already yields the names; the original wrapped
        # it in a redundant pass-through generator expression.
        self.assertEqual(set(svn_utils.parse_external_prop(data)), expected)

    def test_svn13(self):
        self.parse_tester('svn13', False)

    def test_svn14(self):
        self.parse_tester('svn14', False)

    def test_svn15(self):
        self.parse_tester('svn15', False)

    def test_svn16(self):
        self.parse_tester('svn16', True)

    def test_svn17(self):
        self.parse_tester('svn17', True)

    def test_svn18(self):
        self.parse_tester('svn18', True)
|
||||
|
||||
|
||||
class TestSvn(environment.ZippedEnvironment):
    """Run SvnInfo against a zipped example checkout matching the local svn."""

    def setUp(self):
        version = svn_utils.SvnInfo.get_svn_version()
        if not version:  # empty or null: no svn client available
            self.dataname = None
            self.datafile = None
            return

        # Only the (major, minor) pair selects which fixture zip to use.
        self.base_version = tuple(int(x) for x in version.split('.')[:2])

        if self.base_version < (1, 3):
            raise ValueError('Insufficient SVN Version %s' % version)
        elif self.base_version >= (1, 9):
            # No fixture exists for this version yet; fall back to the
            # latest one available.
            self.base_version = (1, 8)

        self.dataname = "svn%i%i_example" % self.base_version
        self.datafile = os.path.join('setuptools', 'tests',
                                     'svn_data', self.dataname + ".zip")
        super(TestSvn, self).setUp()

    @skipIf(not _svn_check, "No SVN to text, in the first place")
    def test_revision(self):
        rev = svn_utils.SvnInfo.load('.').get_revision()
        self.assertEqual(rev, 6)

    @skipIf(not _svn_check, "No SVN to text, in the first place")
    def test_entries(self):
        expected = set([
            (os.path.join('a file'), 'file'),
            (os.path.join('folder'), 'dir'),
            (os.path.join('folder', 'lalala.txt'), 'file'),
            (os.path.join('folder', 'quest.txt'), 'file'),
            # The example checkout contains a deleted file, which must
            # not be reported.
        ])
        info = svn_utils.SvnInfo.load('.')
        # entries is already iterable; no wrapper generator needed.
        self.assertEqual(set(info.entries), expected)

    @skipIf(not _svn_check, "No SVN to text, in the first place")
    def test_externals(self):
        # svn >= 1.6 fixtures use external folder names containing spaces.
        if self.base_version >= (1, 6):
            folder2 = 'third party2'
            folder3 = 'third party3'
        else:
            folder2 = 'third_party2'
            folder3 = 'third_party3'

        expected = set([
            os.path.join(folder2),
            os.path.join(folder3),
            os.path.join('third_party'),
            os.path.join('folder', folder2),
            os.path.join('folder', folder3),
            os.path.join('folder', 'third_party'),
        ])
        info = svn_utils.SvnInfo.load('.')
        # externals is already iterable; the original built a throwaway list.
        self.assertEqual(set(info.externals), expected)
|
||||
|
||||
def test_suite():
    """Collect every test in this module for the classic setup.py runner."""
    loader = unittest.defaultTestLoader
    return loader.loadTestsFromName(__name__)
|
||||
|
|
@ -1,126 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
|
||||
"""develop tests
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import site
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
from setuptools.compat import StringIO, PY2
|
||||
from setuptools.command.test import test
|
||||
from setuptools.command import easy_install as easy_install_pkg
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
# setup.py written into the scaffolded project: a 'name' namespace
# package whose test suite requires 2to3 on Python 3.
SETUP_PY = """\
from setuptools import setup

setup(name='foo',
    packages=['name', 'name.space', 'name.space.tests'],
    namespace_packages=['name'],
    test_suite='name.space.tests.test_suite',
    )
"""

# Namespace-package __init__; the Latin-1 coding declaration plus the
# non-ASCII comment reproduce the conditions of Issue 310.
NS_INIT = """# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Issüé 310
try:
    __import__('pkg_resources').declare_namespace(__name__)
except ImportError:
    from pkgutil import extend_path
    __path__ = extend_path(__path__, __name__)
"""
# Make sure this is Latin-1 binary, before writing:
if PY2:
    NS_INIT = NS_INIT.decode('UTF-8')
NS_INIT = NS_INIT.encode('Latin-1')

# Test module written into name/space/tests/__init__.py; the print
# statement is deliberately Python 2 only so that running it on
# Python 3 proves 2to3 conversion happened.
TEST_PY = """import unittest

class TestTest(unittest.TestCase):
    def test_test(self):
        print "Foo" # Should fail under Python 3 unless 2to3 is used

test_suite = unittest.makeSuite(TestTest)
"""
|
||||
|
||||
class TestTestTest(unittest.TestCase):
    # Runs the 'test' command against a namespace package that needs
    # 2to3, covering the non-ASCII __init__ handling from Issue 310.

    def setUp(self):
        # Requires 2.6+ (site.USER_BASE) and must not run in a virtualenv.
        # NOTE(review): lexicographic version compare -- adequate for
        # "2.6" but fragile in general.
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return

        # Directory structure
        self.dir = tempfile.mkdtemp()
        os.mkdir(os.path.join(self.dir, 'name'))
        os.mkdir(os.path.join(self.dir, 'name', 'space'))
        os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
        # setup.py
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'wt')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        # name/__init__.py -- NS_INIT is pre-encoded Latin-1 bytes,
        # hence the binary mode.
        init = os.path.join(self.dir, 'name', '__init__.py')
        f = open(init, 'wb')
        f.write(NS_INIT)
        f.close()
        # name/space/__init__.py
        init = os.path.join(self.dir, 'name', 'space', '__init__.py')
        f = open(init, 'wt')
        f.write('#empty\n')
        f.close()
        # name/space/tests/__init__.py
        init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
        f = open(init, 'wt')
        f.write(TEST_PY)
        f.close()

        os.chdir(self.dir)
        # Redirect the per-user install locations to scratch directories
        # so the test never touches the real user site.
        self.old_base = site.USER_BASE
        site.USER_BASE = tempfile.mkdtemp()
        self.old_site = site.USER_SITE
        site.USER_SITE = tempfile.mkdtemp()

    def tearDown(self):
        # Mirror the setUp guard: nothing to clean up if setUp bailed out.
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return

        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        shutil.rmtree(site.USER_BASE)
        shutil.rmtree(site.USER_SITE)
        site.USER_BASE = self.old_base
        site.USER_SITE = self.old_site

    def test_test(self):
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return

        dist = Distribution(dict(
            name='foo',
            packages=['name', 'name.space', 'name.space.tests'],
            namespace_packages=['name'],
            test_suite='name.space.tests.test_suite',
            use_2to3=True,
        ))
        dist.script_name = 'setup.py'
        cmd = test(dist)
        cmd.user = 1
        cmd.ensure_finalized()
        cmd.install_dir = site.USER_SITE
        cmd.user = 1
        # Silence the inner test runner's output for the duration of run().
        old_stdout = sys.stdout
        sys.stdout = StringIO()
        try:
            try:  # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
                cmd.run()
            except SystemExit:  # The test runner calls sys.exit, stop that making an error.
                pass
        finally:
            sys.stdout = old_stdout
|
||||
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
"""build_ext tests
|
||||
"""
|
||||
import sys, os, shutil, tempfile, unittest, site, zipfile
|
||||
from setuptools.command.upload_docs import upload_docs
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
SETUP_PY = """\
|
||||
from setuptools import setup
|
||||
|
||||
setup(name='foo')
|
||||
"""
|
||||
|
||||
class TestUploadDocsTest(unittest.TestCase):
    # Exercises upload_docs.create_zipfile against a scratch project tree.

    def setUp(self):
        # Scratch project directory holding a trivial setup.py.
        self.dir = tempfile.mkdtemp()
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'w')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        os.chdir(self.dir)

        self.upload_dir = os.path.join(self.dir, 'build')
        os.mkdir(self.upload_dir)

        # A test document.
        f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
        f.write("Hello world.")
        f.close()

        # An empty folder.
        os.mkdir(os.path.join(self.upload_dir, 'empty'))

        # NOTE(review): lexicographic version compare -- fine for "2.6"
        # but fragile in general. Redirect per-user site dirs to scratch.
        if sys.version >= "2.6":
            self.old_base = site.USER_BASE
            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
            self.old_site = site.USER_SITE
            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()

    def tearDown(self):
        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        if sys.version >= "2.6":
            shutil.rmtree(site.USER_BASE)
            shutil.rmtree(site.USER_SITE)
            site.USER_BASE = self.old_base
            site.USER_SITE = self.old_site

    def test_create_zipfile(self):
        # Test to make sure zipfile creation handles common cases.
        # This explicitly includes a folder containing an empty folder.

        dist = Distribution()

        cmd = upload_docs(dist)
        cmd.upload_dir = self.upload_dir
        cmd.target_dir = self.upload_dir
        tmp_dir = tempfile.mkdtemp()
        tmp_file = os.path.join(tmp_dir, 'foo.zip')
        try:
            zip_file = cmd.create_zipfile(tmp_file)

            assert zipfile.is_zipfile(tmp_file)

            # Re-open the archive to inspect its contents.
            zip_file = zipfile.ZipFile(tmp_file)  # woh...

            # The empty folder must not appear in the archive.
            assert zip_file.namelist() == ['index.html']

            zip_file.close()
        finally:
            shutil.rmtree(tmp_dir)
|
||||
|
||||
|
|
@ -1 +1 @@
|
|||
__version__ = '6.0.2'
|
||||
__version__ = '18.5'
|
||||
|
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
import platform
|
||||
import ctypes
|
||||
|
||||
|
||||
def windows_only(func):
    """
    Decorator: keep *func* as-is on Windows; on every other platform
    replace it with a no-op that accepts any arguments and returns None.
    """
    if platform.system() == 'Windows':
        return func
    return lambda *args, **kwargs: None
|
||||
|
||||
|
||||
@windows_only
def hide_file(path):
    """
    Set the hidden attribute on a file or directory.

    From http://stackoverflow.com/questions/19622133/

    `path` must be text.
    """
    # Load the wintypes submodule; it is referenced through the ctypes
    # package attribute below rather than a bound name.
    __import__('ctypes.wintypes')
    SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
    SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
    SetFileAttributes.restype = ctypes.wintypes.BOOL

    # Win32 file-attribute flag for "hidden".
    FILE_ATTRIBUTE_HIDDEN = 0x02

    ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
    if not ret:
        # Surface the Win32 error (e.g. path not found, access denied).
        raise ctypes.WinError()
|
||||
Loading…
Add table
Add a link
Reference in a new issue