update shared dependencies
parent d4d3d82be3, commit 736cd598a8
521 changed files with 45146 additions and 22574 deletions
315  Shared/lib/python3.4/site-packages/pip/__init__.py  Normal file
@@ -0,0 +1,315 @@
#!/usr/bin/env python
from __future__ import absolute_import

import logging
import os
import optparse
import warnings

import sys
import re

from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation, dist_is_editable
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.requests.packages.urllib3.exceptions import (
    InsecureRequestWarning,
)


# assignment for flake8 to be happy

# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "8.0.2"


logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash or zsh).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args


def check_isolated(args):
    isolated = False

    if "--isolated" in args:
        isolated = True

    return isolated


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Enable our Deprecation Warnings
    for deprecation_warning in deprecation.DEPRECATIONS:
        warnings.simplefilter("default", deprecation_warning)

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)


# ###########################################################
# # Writing freeze files

class FrozenRequirement(object):

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                svn_location = None
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req)
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'


if __name__ == '__main__':
    sys.exit(main())
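Editor's note: a minimal sketch (not part of the commit) of driving the entry point above programmatically. main() splits the general options from the subcommand via parseopts(), dispatches through commands_dict, and returns the command's exit status.

import pip

# equivalent to running `pip install --user INITools` from the shell;
# the package name is illustrative
status = pip.main(['install', '--user', 'INITools'])
print('pip exited with status %d' % status)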
19  Shared/lib/python3.4/site-packages/pip/__main__.py  Normal file
@@ -0,0 +1,19 @@
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

import pip  # noqa

if __name__ == '__main__':
    sys.exit(pip.main())
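Editor's note: a sketch of what the __package__ == '' branch above enables, namely invoking pip straight out of its own wheel file, e.g. `python pip-8.0.2-py2.py3-none-any.whl/pip install ...` (the filename is illustrative). Two dirname() calls on __file__ recover the wheel path that gets prepended to sys.path:

import os

wheel_file = '/tmp/pip-8.0.2-py2.py3-none-any.whl'  # hypothetical path
main_py = os.path.join(wheel_file, 'pip', '__main__.py')
# dirname twice: strip '/__main__.py', then '/pip', leaving the wheel itself
assert os.path.dirname(os.path.dirname(main_py)) == wheel_file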
104  Shared/lib/python3.4/site-packages/pip/_vendor/__init__.py  Normal file
@@ -0,0 +1,104 @@
"""
|
||||
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
|
||||
depend on something external.
|
||||
|
||||
Files inside of pip._vendor should be considered immutable and should only be
|
||||
updated to versions from upstream.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
# Downstream redistributors which have debundled our dependencies should also
|
||||
# patch this value to be true. This will trigger the additional patching
|
||||
# to cause things like "six" to be available as pip.
|
||||
DEBUNDLED = True
|
||||
|
||||
# By default, look in this directory for a bunch of .whl files which we will
|
||||
# add to the beginning of sys.path before attempting to import anything. This
|
||||
# is done to support downstream re-distributors like Debian and Fedora who
|
||||
# wish to create their own Wheels for our dependencies to aid in debundling.
|
||||
WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels'))
|
||||
|
||||
|
||||
# Define a small helper function to alias our vendored modules to the real ones
|
||||
# if the vendored ones do not exist. This idea of this was taken from
|
||||
# https://github.com/kennethreitz/requests/pull/2567.
|
||||
def vendored(modulename):
|
||||
vendored_name = "{0}.{1}".format(__name__, modulename)
|
||||
|
||||
try:
|
||||
__import__(vendored_name, globals(), locals(), level=0)
|
||||
except ImportError:
|
||||
__import__(modulename, globals(), locals(), level=0)
|
||||
sys.modules[vendored_name] = sys.modules[modulename]
|
||||
base, head = vendored_name.rsplit(".", 1)
|
||||
setattr(sys.modules[base], head, sys.modules[modulename])
|
||||
|
||||
|
||||
# If we're operating in a debundled setup, then we want to go ahead and trigger
|
||||
# the aliasing of our vendored libraries as well as looking for wheels to add
|
||||
# to our sys.path. This will cause all of this code to be a no-op typically
|
||||
# however downstream redistributors can enable it in a consistent way across
|
||||
# all platforms.
|
||||
if DEBUNDLED:
|
||||
# Actually look inside of WHEEL_DIR to find .whl files and add them to the
|
||||
# front of our sys.path.
|
||||
sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
|
||||
|
||||
# Actually alias all of our vendored dependencies.
|
||||
vendored("cachecontrol")
|
||||
vendored("colorama")
|
||||
vendored("distlib")
|
||||
vendored("html5lib")
|
||||
vendored("lockfile")
|
||||
vendored("six")
|
||||
vendored("six.moves")
|
||||
vendored("six.moves.urllib")
|
||||
vendored("packaging")
|
||||
vendored("packaging.version")
|
||||
vendored("packaging.specifiers")
|
||||
vendored("pkg_resources")
|
||||
vendored("progress")
|
||||
vendored("retrying")
|
||||
vendored("requests")
|
||||
vendored("requests.packages")
|
||||
vendored("requests.packages.urllib3")
|
||||
vendored("requests.packages.urllib3._collections")
|
||||
vendored("requests.packages.urllib3.connection")
|
||||
vendored("requests.packages.urllib3.connectionpool")
|
||||
vendored("requests.packages.urllib3.contrib")
|
||||
try:
|
||||
vendored("requests.packages.urllib3.contrib.ntlmpool")
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
vendored("requests.packages.urllib3.contrib.pyopenssl")
|
||||
except ImportError:
|
||||
pass
|
||||
vendored("requests.packages.urllib3.exceptions")
|
||||
vendored("requests.packages.urllib3.fields")
|
||||
vendored("requests.packages.urllib3.filepost")
|
||||
vendored("requests.packages.urllib3.packages")
|
||||
try:
|
||||
vendored("requests.packages.urllib3.packages.ordered_dict")
|
||||
vendored("requests.packages.urllib3.packages.six")
|
||||
except ImportError:
|
||||
# Debian already unbundles these from requests.
|
||||
pass
|
||||
vendored("requests.packages.urllib3.packages.ssl_match_hostname")
|
||||
vendored("requests.packages.urllib3.packages.ssl_match_hostname."
|
||||
"_implementation")
|
||||
vendored("requests.packages.urllib3.poolmanager")
|
||||
vendored("requests.packages.urllib3.request")
|
||||
vendored("requests.packages.urllib3.response")
|
||||
vendored("requests.packages.urllib3.util")
|
||||
vendored("requests.packages.urllib3.util.connection")
|
||||
vendored("requests.packages.urllib3.util.request")
|
||||
vendored("requests.packages.urllib3.util.response")
|
||||
vendored("requests.packages.urllib3.util.retry")
|
||||
vendored("requests.packages.urllib3.util.ssl_")
|
||||
vendored("requests.packages.urllib3.util.timeout")
|
||||
vendored("requests.packages.urllib3.util.url")
|
||||
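Editor's note: a self-contained sketch of the aliasing vendored() performs when a vendored copy is absent: the real top-level module is imported and registered under the vendored name, so both import paths resolve to the same module object. Here the standard library's json stands in for a debundled dependency such as six.

import importlib
import sys

def vendored_demo(vendored_name, real_name):
    # mirrors vendored(): fall back to the real module and alias it
    try:
        importlib.import_module(vendored_name)
    except ImportError:
        sys.modules[vendored_name] = importlib.import_module(real_name)

vendored_demo('pip._vendor.json', 'json')  # hypothetical vendored name
import json
assert sys.modules['pip._vendor.json'] is json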
325  Shared/lib/python3.4/site-packages/pip/basecommand.py  Normal file
@@ -0,0 +1,325 @@
"""Base Command class, and related routines"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import optparse
|
||||
import warnings
|
||||
|
||||
from pip import cmdoptions
|
||||
from pip.index import PackageFinder
|
||||
from pip.locations import running_under_virtualenv
|
||||
from pip.download import PipSession
|
||||
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
|
||||
CommandError, PreviousBuildDirError)
|
||||
|
||||
from pip.compat import logging_dictConfig
|
||||
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
||||
from pip.req import InstallRequirement, parse_requirements
|
||||
from pip.status_codes import (
|
||||
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
|
||||
PREVIOUS_BUILD_DIR_ERROR,
|
||||
)
|
||||
from pip.utils import deprecation, get_prog, normalize_path
|
||||
from pip.utils.logging import IndentingFormatter
|
||||
from pip.utils.outdated import pip_version_check
|
||||
|
||||
|
||||
__all__ = ['Command']
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(object):
|
||||
name = None
|
||||
usage = None
|
||||
hidden = False
|
||||
log_streams = ("ext://sys.stdout", "ext://sys.stderr")
|
||||
|
||||
def __init__(self, isolated=False):
|
||||
parser_kw = {
|
||||
'usage': self.usage,
|
||||
'prog': '%s %s' % (get_prog(), self.name),
|
||||
'formatter': UpdatingDefaultsHelpFormatter(),
|
||||
'add_help_option': False,
|
||||
'name': self.name,
|
||||
'description': self.__doc__,
|
||||
'isolated': isolated,
|
||||
}
|
||||
|
||||
self.parser = ConfigOptionParser(**parser_kw)
|
||||
|
||||
# Commands should add options to this option group
|
||||
optgroup_name = '%s Options' % self.name.capitalize()
|
||||
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
|
||||
|
||||
# Add the general options
|
||||
gen_opts = cmdoptions.make_option_group(
|
||||
cmdoptions.general_group,
|
||||
self.parser,
|
||||
)
|
||||
self.parser.add_option_group(gen_opts)
|
||||
|
||||
def _build_session(self, options, retries=None, timeout=None):
|
||||
session = PipSession(
|
||||
cache=(
|
||||
normalize_path(os.path.join(options.cache_dir, "http"))
|
||||
if options.cache_dir else None
|
||||
),
|
||||
retries=retries if retries is not None else options.retries,
|
||||
insecure_hosts=options.trusted_hosts,
|
||||
)
|
||||
|
||||
# Handle custom ca-bundles from the user
|
||||
if options.cert:
|
||||
session.verify = options.cert
|
||||
|
||||
# Handle SSL client certificate
|
||||
if options.client_cert:
|
||||
session.cert = options.client_cert
|
||||
|
||||
# Handle timeouts
|
||||
if options.timeout or timeout:
|
||||
session.timeout = (
|
||||
timeout if timeout is not None else options.timeout
|
||||
)
|
||||
|
||||
# Handle configured proxies
|
||||
if options.proxy:
|
||||
session.proxies = {
|
||||
"http": options.proxy,
|
||||
"https": options.proxy,
|
||||
}
|
||||
|
||||
# Determine if we can prompt the user for authentication or not
|
||||
session.auth.prompting = not options.no_input
|
||||
|
||||
return session
|
||||
|
||||
def parse_args(self, args):
|
||||
# factored out for testability
|
||||
return self.parser.parse_args(args)
|
||||
|
||||
def main(self, args):
|
||||
options, args = self.parse_args(args)
|
||||
|
||||
if options.quiet:
|
||||
if options.quiet == 1:
|
||||
level = "WARNING"
|
||||
if options.quiet == 2:
|
||||
level = "ERROR"
|
||||
else:
|
||||
level = "CRITICAL"
|
||||
elif options.verbose:
|
||||
level = "DEBUG"
|
||||
else:
|
||||
level = "INFO"
|
||||
|
||||
logging_dictConfig({
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"filters": {
|
||||
"exclude_warnings": {
|
||||
"()": "pip.utils.logging.MaxLevelFilter",
|
||||
"level": logging.WARNING,
|
||||
},
|
||||
},
|
||||
"formatters": {
|
||||
"indent": {
|
||||
"()": IndentingFormatter,
|
||||
"format": "%(message)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": level,
|
||||
"class": "pip.utils.logging.ColorizedStreamHandler",
|
||||
"stream": self.log_streams[0],
|
||||
"filters": ["exclude_warnings"],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"console_errors": {
|
||||
"level": "WARNING",
|
||||
"class": "pip.utils.logging.ColorizedStreamHandler",
|
||||
"stream": self.log_streams[1],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"user_log": {
|
||||
"level": "DEBUG",
|
||||
"class": "pip.utils.logging.BetterRotatingFileHandler",
|
||||
"filename": options.log or "/dev/null",
|
||||
"delay": True,
|
||||
"formatter": "indent",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": level,
|
||||
"handlers": list(filter(None, [
|
||||
"console",
|
||||
"console_errors",
|
||||
"user_log" if options.log else None,
|
||||
])),
|
||||
},
|
||||
# Disable any logging besides WARNING unless we have DEBUG level
|
||||
# logging enabled. These use both pip._vendor and the bare names
|
||||
# for the case where someone unbundles our libraries.
|
||||
"loggers": dict(
|
||||
(
|
||||
name,
|
||||
{
|
||||
"level": (
|
||||
"WARNING"
|
||||
if level in ["INFO", "ERROR"]
|
||||
else "DEBUG"
|
||||
),
|
||||
},
|
||||
)
|
||||
for name in ["pip._vendor", "distlib", "requests", "urllib3"]
|
||||
),
|
||||
})
|
||||
|
||||
if sys.version_info[:2] == (2, 6):
|
||||
warnings.warn(
|
||||
"Python 2.6 is no longer supported by the Python core team, "
|
||||
"please upgrade your Python. A future version of pip will "
|
||||
"drop support for Python 2.6",
|
||||
deprecation.Python26DeprecationWarning
|
||||
)
|
||||
|
||||
# TODO: try to get these passing down from the command?
|
||||
# without resorting to os.environ to hold these.
|
||||
|
||||
if options.no_input:
|
||||
os.environ['PIP_NO_INPUT'] = '1'
|
||||
|
||||
if options.exists_action:
|
||||
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
|
||||
|
||||
if options.require_venv:
|
||||
# If a venv is required check if it can really be found
|
||||
if not running_under_virtualenv():
|
||||
logger.critical(
|
||||
'Could not find an activated virtualenv (required).'
|
||||
)
|
||||
sys.exit(VIRTUALENV_NOT_FOUND)
|
||||
|
||||
try:
|
||||
status = self.run(options, args)
|
||||
# FIXME: all commands should return an exit status
|
||||
# and when it is done, isinstance is not needed anymore
|
||||
if isinstance(status, int):
|
||||
return status
|
||||
except PreviousBuildDirError as exc:
|
||||
logger.critical(str(exc))
|
||||
logger.debug('Exception information:', exc_info=True)
|
||||
|
||||
return PREVIOUS_BUILD_DIR_ERROR
|
||||
except (InstallationError, UninstallationError, BadCommand) as exc:
|
||||
logger.critical(str(exc))
|
||||
logger.debug('Exception information:', exc_info=True)
|
||||
|
||||
return ERROR
|
||||
except CommandError as exc:
|
||||
logger.critical('ERROR: %s', exc)
|
||||
logger.debug('Exception information:', exc_info=True)
|
||||
|
||||
return ERROR
|
||||
except KeyboardInterrupt:
|
||||
logger.critical('Operation cancelled by user')
|
||||
logger.debug('Exception information:', exc_info=True)
|
||||
|
||||
return ERROR
|
||||
except:
|
||||
logger.critical('Exception:', exc_info=True)
|
||||
|
||||
return UNKNOWN_ERROR
|
||||
finally:
|
||||
# Check if we're using the latest version of pip available
|
||||
if (not options.disable_pip_version_check and not
|
||||
getattr(options, "no_index", False)):
|
||||
with self._build_session(
|
||||
options,
|
||||
retries=0,
|
||||
timeout=min(5, options.timeout)) as session:
|
||||
pip_version_check(session)
|
||||
|
||||
return SUCCESS
|
||||
|
||||
|
||||
class RequirementCommand(Command):
|
||||
|
||||
@staticmethod
|
||||
def populate_requirement_set(requirement_set, args, options, finder,
|
||||
session, name, wheel_cache):
|
||||
"""
|
||||
Marshal cmd line args into a requirement set.
|
||||
"""
|
||||
for filename in options.constraints:
|
||||
for req in parse_requirements(
|
||||
filename,
|
||||
constraint=True, finder=finder, options=options,
|
||||
session=session, wheel_cache=wheel_cache):
|
||||
requirement_set.add_requirement(req)
|
||||
|
||||
for req in args:
|
||||
requirement_set.add_requirement(
|
||||
InstallRequirement.from_line(
|
||||
req, None, isolated=options.isolated_mode,
|
||||
wheel_cache=wheel_cache
|
||||
)
|
||||
)
|
||||
|
||||
for req in options.editables:
|
||||
requirement_set.add_requirement(
|
||||
InstallRequirement.from_editable(
|
||||
req,
|
||||
default_vcs=options.default_vcs,
|
||||
isolated=options.isolated_mode,
|
||||
wheel_cache=wheel_cache
|
||||
)
|
||||
)
|
||||
|
||||
found_req_in_file = False
|
||||
for filename in options.requirements:
|
||||
for req in parse_requirements(
|
||||
filename,
|
||||
finder=finder, options=options, session=session,
|
||||
wheel_cache=wheel_cache):
|
||||
found_req_in_file = True
|
||||
requirement_set.add_requirement(req)
|
||||
# If --require-hashes was a line in a requirements file, tell
|
||||
# RequirementSet about it:
|
||||
requirement_set.require_hashes = options.require_hashes
|
||||
|
||||
if not (args or options.editables or found_req_in_file):
|
||||
opts = {'name': name}
|
||||
if options.find_links:
|
||||
msg = ('You must give at least one requirement to '
|
||||
'%(name)s (maybe you meant "pip %(name)s '
|
||||
'%(links)s"?)' %
|
||||
dict(opts, links=' '.join(options.find_links)))
|
||||
else:
|
||||
msg = ('You must give at least one requirement '
|
||||
'to %(name)s (see "pip help %(name)s")' % opts)
|
||||
logger.warning(msg)
|
||||
|
||||
def _build_package_finder(self, options, session):
|
||||
"""
|
||||
Create a package finder appropriate to this requirement command.
|
||||
"""
|
||||
index_urls = [options.index_url] + options.extra_index_urls
|
||||
if options.no_index:
|
||||
logger.info('Ignoring indexes: %s', ','.join(index_urls))
|
||||
index_urls = []
|
||||
|
||||
return PackageFinder(
|
||||
find_links=options.find_links,
|
||||
format_control=options.format_control,
|
||||
index_urls=index_urls,
|
||||
trusted_hosts=options.trusted_hosts,
|
||||
allow_all_prereleases=options.pre,
|
||||
process_dependency_links=options.process_dependency_links,
|
||||
session=session,
|
||||
)
|
||||
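Editor's note: a sketch (assuming this pip 8.0.2 tree is importable) of the Command contract used throughout pip/commands/: subclass, declare name/usage/summary, add options to self.cmd_opts, and implement run(options, args) returning a status code from pip.status_codes. EchoCommand is hypothetical, not part of pip.

from pip.basecommand import Command
from pip.status_codes import SUCCESS

class EchoCommand(Command):  # hypothetical example command
    name = 'echo'
    usage = '%prog [options] <args>'
    summary = 'Print the arguments it was given.'

    def run(self, options, args):
        print(' '.join(args))
        return SUCCESS

# EchoCommand().main(['hello']) would configure logging, call run(),
# and return SUCCESS (0).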
292  Shared/lib/python3.4/site-packages/pip/baseparser.py  Normal file
@@ -0,0 +1,292 @@
"""Base option parser setup"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import textwrap
|
||||
from distutils.util import strtobool
|
||||
|
||||
from pip._vendor.six import string_types
|
||||
from pip._vendor.six.moves import configparser
|
||||
from pip.locations import (
|
||||
legacy_config_file, config_basename, running_under_virtualenv,
|
||||
site_config_files
|
||||
)
|
||||
from pip.utils import appdirs, get_terminal_size
|
||||
|
||||
|
||||
_environ_prefix_re = re.compile(r"^PIP_", re.I)
|
||||
|
||||
|
||||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
|
||||
"""A prettier/less verbose help formatter for optparse."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# help position must be aligned with __init__.parseopts.description
|
||||
kwargs['max_help_position'] = 30
|
||||
kwargs['indent_increment'] = 1
|
||||
kwargs['width'] = get_terminal_size()[0] - 2
|
||||
optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
|
||||
|
||||
def format_option_strings(self, option):
|
||||
return self._format_option_strings(option, ' <%s>', ', ')
|
||||
|
||||
def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
|
||||
"""
|
||||
Return a comma-separated list of option strings and metavars.
|
||||
|
||||
:param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
|
||||
:param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
|
||||
:param optsep: separator
|
||||
"""
|
||||
opts = []
|
||||
|
||||
if option._short_opts:
|
||||
opts.append(option._short_opts[0])
|
||||
if option._long_opts:
|
||||
opts.append(option._long_opts[0])
|
||||
if len(opts) > 1:
|
||||
opts.insert(1, optsep)
|
||||
|
||||
if option.takes_value():
|
||||
metavar = option.metavar or option.dest.lower()
|
||||
opts.append(mvarfmt % metavar.lower())
|
||||
|
||||
return ''.join(opts)
|
||||
|
||||
def format_heading(self, heading):
|
||||
if heading == 'Options':
|
||||
return ''
|
||||
return heading + ':\n'
|
||||
|
||||
def format_usage(self, usage):
|
||||
"""
|
||||
Ensure there is only one newline between usage and the first heading
|
||||
if there is no description.
|
||||
"""
|
||||
msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
|
||||
return msg
|
||||
|
||||
def format_description(self, description):
|
||||
# leave full control over description to us
|
||||
if description:
|
||||
if hasattr(self.parser, 'main'):
|
||||
label = 'Commands'
|
||||
else:
|
||||
label = 'Description'
|
||||
# some doc strings have initial newlines, some don't
|
||||
description = description.lstrip('\n')
|
||||
# some doc strings have final newlines and spaces, some don't
|
||||
description = description.rstrip()
|
||||
# dedent, then reindent
|
||||
description = self.indent_lines(textwrap.dedent(description), " ")
|
||||
description = '%s:\n%s\n' % (label, description)
|
||||
return description
|
||||
else:
|
||||
return ''
|
||||
|
||||
def format_epilog(self, epilog):
|
||||
# leave full control over epilog to us
|
||||
if epilog:
|
||||
return epilog
|
||||
else:
|
||||
return ''
|
||||
|
||||
def indent_lines(self, text, indent):
|
||||
new_lines = [indent + line for line in text.split('\n')]
|
||||
return "\n".join(new_lines)
|
||||
|
||||
|
||||
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
|
||||
"""Custom help formatter for use in ConfigOptionParser.
|
||||
|
||||
This is updates the defaults before expanding them, allowing
|
||||
them to show up correctly in the help listing.
|
||||
"""
|
||||
|
||||
def expand_default(self, option):
|
||||
if self.parser is not None:
|
||||
self.parser._update_defaults(self.parser.defaults)
|
||||
return optparse.IndentedHelpFormatter.expand_default(self, option)
|
||||
|
||||
|
||||
class CustomOptionParser(optparse.OptionParser):
|
||||
def insert_option_group(self, idx, *args, **kwargs):
|
||||
"""Insert an OptionGroup at a given position."""
|
||||
group = self.add_option_group(*args, **kwargs)
|
||||
|
||||
self.option_groups.pop()
|
||||
self.option_groups.insert(idx, group)
|
||||
|
||||
return group
|
||||
|
||||
@property
|
||||
def option_list_all(self):
|
||||
"""Get a list of all options, including those in option groups."""
|
||||
res = self.option_list[:]
|
||||
for i in self.option_groups:
|
||||
res.extend(i.option_list)
|
||||
|
||||
return res
|
||||
|
||||
|
||||
class ConfigOptionParser(CustomOptionParser):
|
||||
"""Custom option parser which updates its defaults by checking the
|
||||
configuration files and environmental variables"""
|
||||
|
||||
isolated = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.config = configparser.RawConfigParser()
|
||||
self.name = kwargs.pop('name')
|
||||
self.isolated = kwargs.pop("isolated", False)
|
||||
self.files = self.get_config_files()
|
||||
if self.files:
|
||||
self.config.read(self.files)
|
||||
assert self.name
|
||||
optparse.OptionParser.__init__(self, *args, **kwargs)
|
||||
|
||||
def get_config_files(self):
|
||||
# the files returned by this method will be parsed in order with the
|
||||
# first files listed being overridden by later files in standard
|
||||
# ConfigParser fashion
|
||||
config_file = os.environ.get('PIP_CONFIG_FILE', False)
|
||||
if config_file == os.devnull:
|
||||
return []
|
||||
|
||||
# at the base we have any site-wide configuration
|
||||
files = list(site_config_files)
|
||||
|
||||
# per-user configuration next
|
||||
if not self.isolated:
|
||||
if config_file and os.path.exists(config_file):
|
||||
files.append(config_file)
|
||||
else:
|
||||
# This is the legacy config file, we consider it to be a lower
|
||||
# priority than the new file location.
|
||||
files.append(legacy_config_file)
|
||||
|
||||
# This is the new config file, we consider it to be a higher
|
||||
# priority than the legacy file.
|
||||
files.append(
|
||||
os.path.join(
|
||||
appdirs.user_config_dir("pip"),
|
||||
config_basename,
|
||||
)
|
||||
)
|
||||
|
||||
# finally virtualenv configuration first trumping others
|
||||
if running_under_virtualenv():
|
||||
venv_config_file = os.path.join(
|
||||
sys.prefix,
|
||||
config_basename,
|
||||
)
|
||||
if os.path.exists(venv_config_file):
|
||||
files.append(venv_config_file)
|
||||
|
||||
return files
|
||||
|
||||
def check_default(self, option, key, val):
|
||||
try:
|
||||
return option.check_value(key, val)
|
||||
except optparse.OptionValueError as exc:
|
||||
print("An error occurred during configuration: %s" % exc)
|
||||
sys.exit(3)
|
||||
|
||||
def _update_defaults(self, defaults):
|
||||
"""Updates the given defaults with values from the config files and
|
||||
the environ. Does a little special handling for certain types of
|
||||
options (lists)."""
|
||||
# Then go and look for the other sources of configuration:
|
||||
config = {}
|
||||
# 1. config files
|
||||
for section in ('global', self.name):
|
||||
config.update(
|
||||
self.normalize_keys(self.get_config_section(section))
|
||||
)
|
||||
# 2. environmental variables
|
||||
if not self.isolated:
|
||||
config.update(self.normalize_keys(self.get_environ_vars()))
|
||||
# Accumulate complex default state.
|
||||
self.values = optparse.Values(self.defaults)
|
||||
late_eval = set()
|
||||
# Then set the options with those values
|
||||
for key, val in config.items():
|
||||
# ignore empty values
|
||||
if not val:
|
||||
continue
|
||||
|
||||
option = self.get_option(key)
|
||||
# Ignore options not present in this parser. E.g. non-globals put
|
||||
# in [global] by users that want them to apply to all applicable
|
||||
# commands.
|
||||
if option is None:
|
||||
continue
|
||||
|
||||
if option.action in ('store_true', 'store_false', 'count'):
|
||||
val = strtobool(val)
|
||||
elif option.action == 'append':
|
||||
val = val.split()
|
||||
val = [self.check_default(option, key, v) for v in val]
|
||||
elif option.action == 'callback':
|
||||
late_eval.add(option.dest)
|
||||
opt_str = option.get_opt_string()
|
||||
val = option.convert_value(opt_str, val)
|
||||
# From take_action
|
||||
args = option.callback_args or ()
|
||||
kwargs = option.callback_kwargs or {}
|
||||
option.callback(option, opt_str, val, self, *args, **kwargs)
|
||||
else:
|
||||
val = self.check_default(option, key, val)
|
||||
|
||||
defaults[option.dest] = val
|
||||
|
||||
for key in late_eval:
|
||||
defaults[key] = getattr(self.values, key)
|
||||
self.values = None
|
||||
return defaults
|
||||
|
||||
def normalize_keys(self, items):
|
||||
"""Return a config dictionary with normalized keys regardless of
|
||||
whether the keys were specified in environment variables or in config
|
||||
files"""
|
||||
normalized = {}
|
||||
for key, val in items:
|
||||
key = key.replace('_', '-')
|
||||
if not key.startswith('--'):
|
||||
key = '--%s' % key # only prefer long opts
|
||||
normalized[key] = val
|
||||
return normalized
|
||||
|
||||
def get_config_section(self, name):
|
||||
"""Get a section of a configuration"""
|
||||
if self.config.has_section(name):
|
||||
return self.config.items(name)
|
||||
return []
|
||||
|
||||
def get_environ_vars(self):
|
||||
"""Returns a generator with all environmental vars with prefix PIP_"""
|
||||
for key, val in os.environ.items():
|
||||
if _environ_prefix_re.search(key):
|
||||
yield (_environ_prefix_re.sub("", key).lower(), val)
|
||||
|
||||
def get_default_values(self):
|
||||
"""Overridding to make updating the defaults after instantiation of
|
||||
the option parser possible, _update_defaults() does the dirty work."""
|
||||
if not self.process_default_values:
|
||||
# Old, pre-Optik 1.5 behaviour.
|
||||
return optparse.Values(self.defaults)
|
||||
|
||||
defaults = self._update_defaults(self.defaults.copy()) # ours
|
||||
for option in self._get_all_options():
|
||||
default = defaults.get(option.dest)
|
||||
if isinstance(default, string_types):
|
||||
opt_str = option.get_opt_string()
|
||||
defaults[option.dest] = option.check_value(opt_str, default)
|
||||
return optparse.Values(defaults)
|
||||
|
||||
def error(self, msg):
|
||||
self.print_usage(sys.stderr)
|
||||
self.exit(2, "%s\n" % msg)
|
||||
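Editor's note: a minimal sketch of the key normalization done in normalize_keys() above. Both the environment variable PIP_DEFAULT_TIMEOUT and a "default-timeout" key in a config file end up as the long option string '--default-timeout', which is how _update_defaults() looks the option up.

def normalize_key(key):
    # same transformation as ConfigOptionParser.normalize_keys()
    key = key.replace('_', '-')
    if not key.startswith('--'):
        key = '--%s' % key  # only prefer long opts
    return key

assert normalize_key('default_timeout') == '--default-timeout'
assert normalize_key('no-cache-dir') == '--no-cache-dir'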
618  Shared/lib/python3.4/site-packages/pip/cmdoptions.py  Normal file
@@ -0,0 +1,618 @@
"""
|
||||
shared options and groups
|
||||
|
||||
The principle here is to define options once, but *not* instantiate them
|
||||
globally. One reason being that options with action='append' can carry state
|
||||
between parses. pip parses general options twice internally, and shouldn't
|
||||
pass on state. To be consistent, all options will follow this design.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from functools import partial
|
||||
from optparse import OptionGroup, SUPPRESS_HELP, Option
|
||||
import warnings
|
||||
|
||||
from pip.index import (
|
||||
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
|
||||
fmt_ctl_no_use_wheel)
|
||||
from pip.models import PyPI
|
||||
from pip.locations import USER_CACHE_DIR, src_prefix
|
||||
from pip.utils.hashes import STRONG_HASHES
|
||||
|
||||
|
||||
def make_option_group(group, parser):
|
||||
"""
|
||||
Return an OptionGroup object
|
||||
group -- assumed to be dict with 'name' and 'options' keys
|
||||
parser -- an optparse Parser
|
||||
"""
|
||||
option_group = OptionGroup(parser, group['name'])
|
||||
for option in group['options']:
|
||||
option_group.add_option(option())
|
||||
return option_group
|
||||
|
||||
|
||||
def resolve_wheel_no_use_binary(options):
|
||||
if not options.use_wheel:
|
||||
control = options.format_control
|
||||
fmt_ctl_no_use_wheel(control)
|
||||
|
||||
|
||||
def check_install_build_global(options, check_options=None):
|
||||
"""Disable wheels if per-setup.py call options are set.
|
||||
|
||||
:param options: The OptionParser options to update.
|
||||
:param check_options: The options to check, if not supplied defaults to
|
||||
options.
|
||||
"""
|
||||
if check_options is None:
|
||||
check_options = options
|
||||
|
||||
def getname(n):
|
||||
return getattr(check_options, n, None)
|
||||
names = ["build_options", "global_options", "install_options"]
|
||||
if any(map(getname, names)):
|
||||
control = options.format_control
|
||||
fmt_ctl_no_binary(control)
|
||||
warnings.warn(
|
||||
'Disabling all use of wheels due to the use of --build-options '
|
||||
'/ --global-options / --install-options.', stacklevel=2)
|
||||
|
||||
|
||||
###########
|
||||
# options #
|
||||
###########
|
||||
|
||||
help_ = partial(
|
||||
Option,
|
||||
'-h', '--help',
|
||||
dest='help',
|
||||
action='help',
|
||||
help='Show help.')
|
||||
|
||||
isolated_mode = partial(
|
||||
Option,
|
||||
"--isolated",
|
||||
dest="isolated_mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=(
|
||||
"Run pip in an isolated mode, ignoring environment variables and user "
|
||||
"configuration."
|
||||
),
|
||||
)
|
||||
|
||||
require_virtualenv = partial(
|
||||
Option,
|
||||
# Run only if inside a virtualenv, bail if not.
|
||||
'--require-virtualenv', '--require-venv',
|
||||
dest='require_venv',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
verbose = partial(
|
||||
Option,
|
||||
'-v', '--verbose',
|
||||
dest='verbose',
|
||||
action='count',
|
||||
default=0,
|
||||
help='Give more output. Option is additive, and can be used up to 3 times.'
|
||||
)
|
||||
|
||||
version = partial(
|
||||
Option,
|
||||
'-V', '--version',
|
||||
dest='version',
|
||||
action='store_true',
|
||||
help='Show version and exit.')
|
||||
|
||||
quiet = partial(
|
||||
Option,
|
||||
'-q', '--quiet',
|
||||
dest='quiet',
|
||||
action='count',
|
||||
default=0,
|
||||
help='Give less output.')
|
||||
|
||||
log = partial(
|
||||
Option,
|
||||
"--log", "--log-file", "--local-log",
|
||||
dest="log",
|
||||
metavar="path",
|
||||
help="Path to a verbose appending log."
|
||||
)
|
||||
|
||||
no_input = partial(
|
||||
Option,
|
||||
# Don't ask for input
|
||||
'--no-input',
|
||||
dest='no_input',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
proxy = partial(
|
||||
Option,
|
||||
'--proxy',
|
||||
dest='proxy',
|
||||
type='str',
|
||||
default='',
|
||||
help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
|
||||
|
||||
retries = partial(
|
||||
Option,
|
||||
'--retries',
|
||||
dest='retries',
|
||||
type='int',
|
||||
default=5,
|
||||
help="Maximum number of retries each connection should attempt "
|
||||
"(default %default times).")
|
||||
|
||||
timeout = partial(
|
||||
Option,
|
||||
'--timeout', '--default-timeout',
|
||||
metavar='sec',
|
||||
dest='timeout',
|
||||
type='float',
|
||||
default=15,
|
||||
help='Set the socket timeout (default %default seconds).')
|
||||
|
||||
default_vcs = partial(
|
||||
Option,
|
||||
# The default version control system for editables, e.g. 'svn'
|
||||
'--default-vcs',
|
||||
dest='default_vcs',
|
||||
type='str',
|
||||
default='',
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
skip_requirements_regex = partial(
|
||||
Option,
|
||||
# A regex to be used to skip requirements
|
||||
'--skip-requirements-regex',
|
||||
dest='skip_requirements_regex',
|
||||
type='str',
|
||||
default='',
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
|
||||
def exists_action():
|
||||
return Option(
|
||||
# Option when path already exist
|
||||
'--exists-action',
|
||||
dest='exists_action',
|
||||
type='choice',
|
||||
choices=['s', 'i', 'w', 'b'],
|
||||
default=[],
|
||||
action='append',
|
||||
metavar='action',
|
||||
help="Default action when a path already exists: "
|
||||
"(s)witch, (i)gnore, (w)ipe, (b)ackup.")
|
||||
|
||||
|
||||
cert = partial(
|
||||
Option,
|
||||
'--cert',
|
||||
dest='cert',
|
||||
type='str',
|
||||
metavar='path',
|
||||
help="Path to alternate CA bundle.")
|
||||
|
||||
client_cert = partial(
|
||||
Option,
|
||||
'--client-cert',
|
||||
dest='client_cert',
|
||||
type='str',
|
||||
default=None,
|
||||
metavar='path',
|
||||
help="Path to SSL client certificate, a single file containing the "
|
||||
"private key and the certificate in PEM format.")
|
||||
|
||||
index_url = partial(
|
||||
Option,
|
||||
'-i', '--index-url', '--pypi-url',
|
||||
dest='index_url',
|
||||
metavar='URL',
|
||||
default=PyPI.simple_url,
|
||||
help='Base URL of Python Package Index (default %default).')
|
||||
|
||||
|
||||
def extra_index_url():
|
||||
return Option(
|
||||
'--extra-index-url',
|
||||
dest='extra_index_urls',
|
||||
metavar='URL',
|
||||
action='append',
|
||||
default=[],
|
||||
help='Extra URLs of package indexes to use in addition to --index-url.'
|
||||
)
|
||||
|
||||
|
||||
no_index = partial(
|
||||
Option,
|
||||
'--no-index',
|
||||
dest='no_index',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Ignore package index (only looking at --find-links URLs instead).')
|
||||
|
||||
|
||||
def find_links():
|
||||
return Option(
|
||||
'-f', '--find-links',
|
||||
dest='find_links',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='url',
|
||||
help="If a url or path to an html file, then parse for links to "
|
||||
"archives. If a local path or file:// url that's a directory,"
|
||||
"then look for archives in the directory listing.")
|
||||
|
||||
|
||||
def allow_external():
|
||||
return Option(
|
||||
"--allow-external",
|
||||
dest="allow_external",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="PACKAGE",
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
|
||||
allow_all_external = partial(
|
||||
Option,
|
||||
"--allow-all-external",
|
||||
dest="allow_all_external",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
|
||||
def trusted_host():
|
||||
return Option(
|
||||
"--trusted-host",
|
||||
dest="trusted_hosts",
|
||||
action="append",
|
||||
metavar="HOSTNAME",
|
||||
default=[],
|
||||
help="Mark this host as trusted, even though it does not have valid "
|
||||
"or any HTTPS.",
|
||||
)
|
||||
|
||||
|
||||
# Remove after 7.0
|
||||
no_allow_external = partial(
|
||||
Option,
|
||||
"--no-allow-external",
|
||||
dest="allow_all_external",
|
||||
action="store_false",
|
||||
default=False,
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
|
||||
# Remove --allow-insecure after 7.0
|
||||
def allow_unsafe():
|
||||
return Option(
|
||||
"--allow-unverified", "--allow-insecure",
|
||||
dest="allow_unverified",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="PACKAGE",
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
# Remove after 7.0
|
||||
no_allow_unsafe = partial(
|
||||
Option,
|
||||
"--no-allow-insecure",
|
||||
dest="allow_all_insecure",
|
||||
action="store_false",
|
||||
default=False,
|
||||
help=SUPPRESS_HELP
|
||||
)
|
||||
|
||||
# Remove after 1.5
|
||||
process_dependency_links = partial(
|
||||
Option,
|
||||
"--process-dependency-links",
|
||||
dest="process_dependency_links",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Enable the processing of dependency links.",
|
||||
)
|
||||
|
||||
|
||||
def constraints():
|
||||
return Option(
|
||||
'-c', '--constraint',
|
||||
dest='constraints',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='file',
|
||||
help='Constrain versions using the given constraints file. '
|
||||
'This option can be used multiple times.')
|
||||
|
||||
|
||||
def requirements():
|
||||
return Option(
|
||||
'-r', '--requirement',
|
||||
dest='requirements',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='file',
|
||||
help='Install from the given requirements file. '
|
||||
'This option can be used multiple times.')
|
||||
|
||||
|
||||
def editable():
|
||||
return Option(
|
||||
'-e', '--editable',
|
||||
dest='editables',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar='path/url',
|
||||
help=('Install a project in editable mode (i.e. setuptools '
|
||||
'"develop mode") from a local project path or a VCS url.'),
|
||||
)
|
||||
|
||||
src = partial(
|
||||
Option,
|
||||
'--src', '--source', '--source-dir', '--source-directory',
|
||||
dest='src_dir',
|
||||
metavar='dir',
|
||||
default=src_prefix,
|
||||
help='Directory to check out editable projects into. '
|
||||
'The default in a virtualenv is "<venv path>/src". '
|
||||
'The default for global installs is "<current dir>/src".'
|
||||
)
|
||||
|
||||
# XXX: deprecated, remove in 9.0
|
||||
use_wheel = partial(
|
||||
Option,
|
||||
'--use-wheel',
|
||||
dest='use_wheel',
|
||||
action='store_true',
|
||||
default=True,
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
# XXX: deprecated, remove in 9.0
|
||||
no_use_wheel = partial(
|
||||
Option,
|
||||
'--no-use-wheel',
|
||||
dest='use_wheel',
|
||||
action='store_false',
|
||||
default=True,
|
||||
help=('Do not Find and prefer wheel archives when searching indexes and '
|
||||
'find-links locations. DEPRECATED in favour of --no-binary.'),
|
||||
)
|
||||
|
||||
|
||||
def _get_format_control(values, option):
|
||||
"""Get a format_control object."""
|
||||
return getattr(values, option.dest)
|
||||
|
||||
|
||||
def _handle_no_binary(option, opt_str, value, parser):
|
||||
existing = getattr(parser.values, option.dest)
|
||||
fmt_ctl_handle_mutual_exclude(
|
||||
value, existing.no_binary, existing.only_binary)
|
||||
|
||||
|
||||
def _handle_only_binary(option, opt_str, value, parser):
|
||||
existing = getattr(parser.values, option.dest)
|
||||
fmt_ctl_handle_mutual_exclude(
|
||||
value, existing.only_binary, existing.no_binary)
|
||||
|
||||
|
||||
def no_binary():
|
||||
return Option(
|
||||
"--no-binary", dest="format_control", action="callback",
|
||||
callback=_handle_no_binary, type="str",
|
||||
default=FormatControl(set(), set()),
|
||||
help="Do not use binary packages. Can be supplied multiple times, and "
|
||||
"each time adds to the existing value. Accepts either :all: to "
|
||||
"disable all binary packages, :none: to empty the set, or one or "
|
||||
"more package names with commas between them. Note that some "
|
||||
"packages are tricky to compile and may fail to install when "
|
||||
"this option is used on them.")
|
||||
|
||||
|
||||
def only_binary():
|
||||
return Option(
|
||||
"--only-binary", dest="format_control", action="callback",
|
||||
callback=_handle_only_binary, type="str",
|
||||
default=FormatControl(set(), set()),
|
||||
help="Do not use source packages. Can be supplied multiple times, and "
|
||||
"each time adds to the existing value. Accepts either :all: to "
|
||||
"disable all source packages, :none: to empty the set, or one or "
|
||||
"more package names with commas between them. Packages without "
|
||||
"binary distributions will fail to install when this option is "
|
||||
"used on them.")
|
||||
|
||||
|
||||
cache_dir = partial(
|
||||
Option,
|
||||
"--cache-dir",
|
||||
dest="cache_dir",
|
||||
default=USER_CACHE_DIR,
|
||||
metavar="dir",
|
||||
help="Store the cache data in <dir>."
|
||||
)
|
||||
|
||||
no_cache = partial(
|
||||
Option,
|
||||
"--no-cache-dir",
|
||||
dest="cache_dir",
|
||||
action="store_false",
|
||||
help="Disable the cache.",
|
||||
)
|
||||
|
||||
no_deps = partial(
|
||||
Option,
|
||||
'--no-deps', '--no-dependencies',
|
||||
dest='ignore_dependencies',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="Don't install package dependencies.")
|
||||
|
||||
build_dir = partial(
|
||||
Option,
|
||||
'-b', '--build', '--build-dir', '--build-directory',
|
||||
dest='build_dir',
|
||||
metavar='dir',
|
||||
help='Directory to unpack packages into and build in.'
|
||||
)
|
||||
|
||||
install_options = partial(
|
||||
Option,
|
||||
'--install-option',
|
||||
dest='install_options',
|
||||
action='append',
|
||||
metavar='options',
|
||||
help="Extra arguments to be supplied to the setup.py install "
|
||||
"command (use like --install-option=\"--install-scripts=/usr/local/"
|
||||
"bin\"). Use multiple --install-option options to pass multiple "
|
||||
"options to setup.py install. If you are using an option with a "
|
||||
"directory path, be sure to use absolute path.")
|
||||
|
||||
global_options = partial(
|
||||
Option,
|
||||
'--global-option',
|
||||
dest='global_options',
|
||||
action='append',
|
||||
metavar='options',
|
||||
help="Extra global options to be supplied to the setup.py "
|
||||
"call before the install command.")
|
||||
|
||||
no_clean = partial(
|
||||
Option,
|
||||
'--no-clean',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="Don't clean up build directories.")
|
||||
|
||||
pre = partial(
|
||||
Option,
|
||||
'--pre',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="Include pre-release and development versions. By default, "
|
||||
"pip only finds stable versions.")
|
||||
|
||||
disable_pip_version_check = partial(
|
||||
Option,
|
||||
"--disable-pip-version-check",
|
||||
dest="disable_pip_version_check",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Don't periodically check PyPI to determine whether a new version "
|
||||
"of pip is available for download. Implied with --no-index.")
|
||||
|
||||
# Deprecated, Remove later
|
||||
always_unzip = partial(
|
||||
Option,
|
||||
'-Z', '--always-unzip',
|
||||
dest='always_unzip',
|
||||
action='store_true',
|
||||
help=SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
|
||||
def _merge_hash(option, opt_str, value, parser):
|
||||
"""Given a value spelled "algo:digest", append the digest to a list
|
||||
pointed to in a dict by the algo name."""
|
||||
if not parser.values.hashes:
|
||||
parser.values.hashes = {}
|
||||
try:
|
||||
algo, digest = value.split(':', 1)
|
||||
except ValueError:
|
||||
parser.error('Arguments to %s must be a hash name '
|
||||
'followed by a value, like --hash=sha256:abcde...' %
|
||||
opt_str)
|
||||
if algo not in STRONG_HASHES:
|
||||
parser.error('Allowed hash algorithms for %s are %s.' %
|
||||
(opt_str, ', '.join(STRONG_HASHES)))
|
||||
parser.values.hashes.setdefault(algo, []).append(digest)
|
||||
|
||||
|
||||
hash = partial(
|
||||
Option,
|
||||
'--hash',
|
||||
# Hash values eventually end up in InstallRequirement.hashes due to
|
||||
# __dict__ copying in process_line().
|
||||
dest='hashes',
|
||||
action='callback',
|
||||
callback=_merge_hash,
|
||||
type='string',
|
||||
help="Verify that the package's archive matches this "
|
||||
'hash before installing. Example: --hash=sha256:abcdef...')
|
||||
|
||||
|
||||
require_hashes = partial(
|
||||
Option,
|
||||
'--require-hashes',
|
||||
dest='require_hashes',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Require a hash to check each requirement against, for '
|
||||
'repeatable installs. This option is implied when any package in a '
|
||||
'requirements file has a --hash option.')
|
||||
|
||||
|
||||
##########
|
||||
# groups #
|
||||
##########
|
||||
|
||||
general_group = {
|
||||
'name': 'General Options',
|
||||
'options': [
|
||||
help_,
|
||||
isolated_mode,
|
||||
require_virtualenv,
|
||||
verbose,
|
||||
version,
|
||||
quiet,
|
||||
log,
|
||||
no_input,
|
||||
proxy,
|
||||
retries,
|
||||
timeout,
|
||||
default_vcs,
|
||||
skip_requirements_regex,
|
||||
exists_action,
|
||||
trusted_host,
|
||||
cert,
|
||||
client_cert,
|
||||
cache_dir,
|
||||
no_cache,
|
||||
disable_pip_version_check,
|
||||
]
|
||||
}
|
||||
|
||||
non_deprecated_index_group = {
|
||||
'name': 'Package Index Options',
|
||||
'options': [
|
||||
index_url,
|
||||
extra_index_url,
|
||||
no_index,
|
||||
find_links,
|
||||
process_dependency_links,
|
||||
]
|
||||
}
|
||||
|
||||
index_group = {
|
||||
'name': 'Package Index Options (including deprecated options)',
|
||||
'options': non_deprecated_index_group['options'] + [
|
||||
allow_external,
|
||||
allow_all_external,
|
||||
no_allow_external,
|
||||
allow_unsafe,
|
||||
no_allow_unsafe,
|
||||
]
|
||||
}
|
||||
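Editor's note: a self-contained sketch of how the definitions above are consumed. Because every entry in a group's 'options' list is a factory (a partial or a function), make_option_group() creates fresh Option instances per parser, so 'append' options carry no state between parses. This re-implements the pattern with plain optparse rather than importing pip:

import optparse
from functools import partial

verbose = partial(optparse.Option, '-v', '--verbose', dest='verbose',
                  action='count', default=0, help='Give more output.')
group = {'name': 'General Options', 'options': [verbose]}

parser = optparse.OptionParser()
og = optparse.OptionGroup(parser, group['name'])
for option in group['options']:
    og.add_option(option())  # call the factory -> a new Option each time
parser.add_option_group(og)

opts, _ = parser.parse_args(['-vv'])
assert opts.verbose == 2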
85  Shared/lib/python3.4/site-packages/pip/commands/__init__.py  Normal file
@@ -0,0 +1,85 @@
"""
|
||||
Package containing all pip commands
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from pip.commands.completion import CompletionCommand
|
||||
from pip.commands.download import DownloadCommand
|
||||
from pip.commands.freeze import FreezeCommand
|
||||
from pip.commands.hash import HashCommand
|
||||
from pip.commands.help import HelpCommand
|
||||
from pip.commands.list import ListCommand
|
||||
from pip.commands.search import SearchCommand
|
||||
from pip.commands.show import ShowCommand
|
||||
from pip.commands.install import InstallCommand
|
||||
from pip.commands.uninstall import UninstallCommand
|
||||
from pip.commands.wheel import WheelCommand
|
||||
|
||||
|
||||
commands_dict = {
|
||||
CompletionCommand.name: CompletionCommand,
|
||||
FreezeCommand.name: FreezeCommand,
|
||||
HashCommand.name: HashCommand,
|
||||
HelpCommand.name: HelpCommand,
|
||||
SearchCommand.name: SearchCommand,
|
||||
ShowCommand.name: ShowCommand,
|
||||
InstallCommand.name: InstallCommand,
|
||||
UninstallCommand.name: UninstallCommand,
|
||||
DownloadCommand.name: DownloadCommand,
|
||||
ListCommand.name: ListCommand,
|
||||
WheelCommand.name: WheelCommand,
|
||||
}
|
||||
|
||||
|
||||
commands_order = [
|
||||
InstallCommand,
|
||||
DownloadCommand,
|
||||
UninstallCommand,
|
||||
FreezeCommand,
|
||||
ListCommand,
|
||||
ShowCommand,
|
||||
SearchCommand,
|
||||
WheelCommand,
|
||||
HashCommand,
|
||||
HelpCommand,
|
||||
]
|
||||
|
||||
|
||||
def get_summaries(ignore_hidden=True, ordered=True):
|
||||
"""Yields sorted (command name, command summary) tuples."""
|
||||
|
||||
if ordered:
|
||||
cmditems = _sort_commands(commands_dict, commands_order)
|
||||
else:
|
||||
cmditems = commands_dict.items()
|
||||
|
||||
for name, command_class in cmditems:
|
||||
if ignore_hidden and command_class.hidden:
|
||||
continue
|
||||
|
||||
yield (name, command_class.summary)
|
||||
|
||||
|
||||
def get_similar_commands(name):
|
||||
"""Command name auto-correct."""
|
||||
from difflib import get_close_matches
|
||||
|
||||
name = name.lower()
|
||||
|
||||
close_commands = get_close_matches(name, commands_dict.keys())
|
||||
|
||||
if close_commands:
|
||||
return close_commands[0]
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def _sort_commands(cmddict, order):
|
||||
def keyfn(key):
|
||||
try:
|
||||
return order.index(key[1])
|
||||
except ValueError:
|
||||
# unordered items should come last
|
||||
return 0xff
|
||||
|
||||
return sorted(cmddict.items(), key=keyfn)
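A quick illustration of how the two helpers above behave (a sketch, not part of the diff; the literal results assume the commands_dict and commands_order defined just above):

# Hypothetical REPL session against the module above:
#   >>> from pip.commands import get_similar_commands, get_summaries
#   >>> get_similar_commands('instal')      # difflib close match
#   'install'
#   >>> get_similar_commands('xyzzy')       # nothing close enough
#   False
#   >>> next(get_summaries())               # commands_order puts install first
#   ('install', 'Install packages.')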
68
Shared/lib/python3.4/site-packages/pip/commands/completion.py
Normal file
@@ -0,0 +1,68 @@
from __future__ import absolute_import

import sys
from pip.basecommand import Command

BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command to be used for command completion'
    hidden = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = COMPLETION_SCRIPTS.get(options.shell, '')
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
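The completion scripts above simply re-invoke pip with three environment variables and splice its stdout into the shell's reply array. A minimal sketch of that round trip from Python (the exact output depends on the installed pip, so it is illustrative only):

import os
import subprocess

env = dict(os.environ, PIP_AUTO_COMPLETE='1',
           COMP_WORDS='pip uni', COMP_CWORD='1')
# pip's autocomplete() reads these variables, prints matching subcommand
# names, and exits, so this prints something like b'uninstall\n'.
print(subprocess.check_output(['pip'], env=env))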
136
Shared/lib/python3.4/site-packages/pip/commands/download.py
Normal file
@@ -0,0 +1,136 @@
from __future__ import absolute_import

import logging
import os

from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip import cmdoptions
from pip.utils import ensure_dir, normalize_path
from pip.utils.build import BuildDirectory
from pip.utils.filesystem import check_path_owner


logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.non_deprecated_index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        options.ignore_installed = True
        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:

                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    ignore_installed=True,
                    ignore_dependencies=options.ignore_dependencies,
                    session=session,
                    isolated=options.isolated_mode,
                    require_hashes=options.require_hashes
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                if not requirement_set.has_requirements:
                    return

                requirement_set.prepare_files(finder)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info(
                        'Successfully downloaded %s', downloaded
                    )

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
71
Shared/lib/python3.4/site-packages/pip/commands/freeze.py
Normal file
@@ -0,0 +1,71 @@
from __future__ import absolute_import

import sys

import pip
from pip.basecommand import Command
from pip.operations.freeze import freeze
from pip.wheel import WheelCache


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirement',
            action='store',
            default=None,
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        format_control = pip.index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        freeze_kwargs = dict(
            requirement=options.requirement,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache)

        for line in freeze(**freeze_kwargs):
            sys.stdout.write(line + '\n')
57
Shared/lib/python3.4/site-packages/pip/commands/hash.py
Normal file
@@ -0,0 +1,57 @@
from __future__ import absolute_import

import hashlib
import logging
import sys

from pip.basecommand import Command
from pip.status_codes import ERROR
from pip.utils import read_chunks
from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES


logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            logger.info('%s:\n--hash=%s:%s',
                        path, algorithm, _hash_of_file(path, algorithm))


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
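For reference, the same digest can be produced with the standard library alone; this standalone sketch mirrors _hash_of_file without pip's internal read_chunks helper (the chunk size is an assumption):

import hashlib

def sha256_of_file(path, blocksize=8192):
    # Stream the file so large archives are never loaded into memory at once.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(blocksize), b''):
            h.update(chunk)
    return h.hexdigest()  # usable as --hash=sha256:<digest> in requirements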
35
Shared/lib/python3.4/site-packages/pip/commands/help.py
Normal file
@@ -0,0 +1,35 @@
from __future__ import absolute_import

from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'

    def run(self, options, args):
        from pip.commands import commands_dict, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
386
Shared/lib/python3.4/site-packages/pip/commands/install.py
Normal file
@@ -0,0 +1,386 @@
from __future__ import absolute_import

import logging
import operator
import os
import tempfile
import shutil
import warnings
try:
    import wheel
except ImportError:
    wheel = None

from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip.locations import virtualenv_no_global, distutils_scheme
from pip.exceptions import (
    InstallationError, CommandError, PreviousBuildDirError,
)
from pip import cmdoptions
from pip.utils import ensure_dir
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.filesystem import check_path_owner
from pip.wheel import WheelCache, WheelBuilder


logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help=("Download packages into <dir> instead of installing them, "
                  "regardless of what's already installed."),
        )

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. This process is recursive regardless of whether '
                 'a dependency is already satisfied.'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
                 'already up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.no_deps())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally "
                 "does. This option is not about installing *from* eggs. "
                 "(WARNING: Because this option overrides pip's normal install"
                 " logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")

        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                if hasattr(req, 'installed_version'):
                                    if req.installed_version:
                                        item += '-' + req.installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']

            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
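The branch at the heart of run() above decides whether to build wheels before installing; restated as a tiny predicate for readability (a paraphrase of the condition, not pip API):

def should_autobuild_wheels(download_dir, wheel_module, cache_dir):
    # Skip wheel building when only downloading (-d), when the 'wheel'
    # package is not importable, or when the wheel cache is disabled.
    return not download_dir and wheel_module is not None and bool(cache_dir)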
209
Shared/lib/python3.4/site-packages/pip/commands/list.py
Normal file
@@ -0,0 +1,209 @@
from __future__ import absolute_import

import logging
import warnings

from pip.basecommand import Command
from pip.exceptions import CommandError
from pip.index import PackageFinder
from pip.utils import (
    get_installed_distributions, dist_is_editable)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.cmdoptions import make_option_group, index_group


logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        else:
            self.run_listing(options)

    def run_outdated(self, options):
        for dist, latest_version, typ in sorted(
                self.find_packages_latest_versions(options),
                key=lambda p: p[0].project_name.lower()):
            if latest_version > dist.parsed_version:
                logger.info(
                    '%s - Latest: %s [%s]',
                    self.output_package(dist), latest_version, typ,
                )

    def find_packages_latest_versions(self, options):
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            installed_packages = get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
            )
            for dist in installed_packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                yield dist, remote_version, typ

    def run_listing(self, options):
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
        )
        self.output_package_listing(installed_packages)

    def output_package(self, dist):
        if dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_package_listing(self, installed_packages):
        installed_packages = sorted(
            installed_packages,
            key=lambda dist: dist.project_name.lower(),
        )
        for dist in installed_packages:
            logger.info(self.output_package(dist))

    def run_uptodate(self, options):
        uptodate = []
        for dist, version, typ in self.find_packages_latest_versions(options):
            if dist.parsed_version == version:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
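run_outdated above compares parsed versions, not raw strings; the distinction matters for multi-digit components. A small illustration using the parser the vendored pkg_resources exposes (values chosen for demonstration):

from pip._vendor import pkg_resources

parse = pkg_resources.parse_version
assert parse('1.10') > parse('1.9')  # correct semantic ordering
assert '1.10' < '1.9'                # plain string comparison gets it wrong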
139
Shared/lib/python3.4/site-packages/pip/commands/search.py
Normal file
@@ -0,0 +1,139 @@
from __future__ import absolute_import

import logging
import sys
import textwrap

from pip.basecommand import Command, SUCCESS
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client


logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = {}
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']
        score = hit['_pypi_ordering']
        if score is None:
            score = 0

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
                'score': score,
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary
                packages[name]['score'] = score

    # each record has a unique name now, so we will convert the dict into a
    # list sorted by score
    package_list = sorted(
        packages.values(),
        key=lambda x: x['score'],
        reverse=True,
    )
    return package_list


def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max((len(hit['name']) for hit in hits)) + 4
    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        if terminal_width is not None:
            # wrap and indent summary to fit terminal
            summary = textwrap.wrap(
                summary,
                terminal_width - name_column_width - 5,
            )
            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
        line = '%s - %s' % (name.ljust(name_column_width), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST:    %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    return next(iter(
        sorted(versions, key=pkg_resources.parse_version, reverse=True)
    ))
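transform_hits above folds the per-version records PyPI returns into one record per package; a sketch of the shape (field values are illustrative, not real index data):

hits = [
    {'name': 'foo', 'summary': 'old', 'version': '1.0', '_pypi_ordering': 1},
    {'name': 'foo', 'summary': 'new', 'version': '2.0', '_pypi_ordering': 2},
]
# transform_hits(hits) collapses both rows into one package entry, keeping
# the summary and score of the highest version:
# [{'name': 'foo', 'summary': 'new', 'versions': ['1.0', '2.0'], 'score': 2}]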
131
Shared/lib/python3.4/site-packages/pip/commands/show.py
Normal file
@@ -0,0 +1,131 @@
from __future__ import absolute_import

from email.parser import FeedParser
import logging
import os

from pip.basecommand import Command
from pip.status_codes import SUCCESS, ERROR
from pip._vendor import pkg_resources


logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(results, options.files):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    installed = dict(
        [(p.project_name.lower(), p) for p in pkg_resources.working_set])
    query_names = [name.lower() for name in query]
    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_all_files):
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for dist in distributions:
        results_printed = True
        logger.info("---")
        logger.info("Metadata-Version: %s", dist.get('metadata-version'))
        logger.info("Name: %s", dist['name'])
        logger.info("Version: %s", dist['version'])
        logger.info("Summary: %s", dist.get('summary'))
        logger.info("Home-page: %s", dist.get('home-page'))
        logger.info("Author: %s", dist.get('author'))
        logger.info("Author-email: %s", dist.get('author-email'))
        logger.info("License: %s", dist.get('license'))
        logger.info("Location: %s", dist['location'])
        logger.info("Requires: %s", ', '.join(dist['requires']))
        if list_all_files:
            logger.info("Files:")
            if 'files' in dist:
                for line in dist['files']:
                    logger.info("  %s", line.strip())
            else:
                logger.info("Cannot locate installed-files.txt")
        if 'entry_points' in dist:
            logger.info("Entry-points:")
            for line in dist['entry_points']:
                logger.info("  %s", line.strip())
    return results_printed
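The RECORD parsing above relies on each line being 'path,hash,size'; a one-line sketch of that split (the sample line is made up):

record_line = 'pip/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JA,315'
path = record_line.split(',')[0]  # -> 'pip/__init__.py'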
76
Shared/lib/python3.4/site-packages/pip/commands/uninstall.py
Normal file
@@ -0,0 +1,76 @@
from __future__ import absolute_import

import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            format_control = pip.index.FormatControl(set(), set())
            wheel_cache = WheelCache(options.cache_dir, format_control)
            requirement_set = RequirementSet(
                build_dir=None,
                src_dir=None,
                download_dir=None,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=wheel_cache,
            )
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(
                        name, isolated=options.isolated_mode,
                        wheel_cache=wheel_cache
                    )
                )
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session,
                        wheel_cache=wheel_cache):
                    requirement_set.add_requirement(req)
            if not requirement_set.has_requirements:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )
            requirement_set.uninstall(auto_confirm=options.yes)
204
Shared/lib/python3.4/site-packages/pip/commands/wheel.py
Normal file
@@ -0,0 +1,204 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os
import warnings

from pip.basecommand import RequirementCommand
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import RequirementSet
from pip.utils import import_or_raise
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.wheel import WheelCache, WheelBuilder
from pip import cmdoptions


logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: http://wheel.readthedocs.org/en/latest.

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def check_required_packages(self):
        import_or_raise(
            'wheel.bdist_wheel',
            CommandError,
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel"
        )
        pkg_resources = import_or_raise(
            'pkg_resources',
            CommandError,
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools"
        )
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools"
            )

    def run(self, options, args):
        self.check_required_packages()
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=None,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                    session=session,
                    wheel_cache=wheel_cache,
                    wheel_download_dir=options.wheel_dir,
                    require_hashes=options.require_hashes
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    # build wheels
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                    )
                    if not wb.build():
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
158
Shared/lib/python3.4/site-packages/pip/compat/__init__.py
Normal file
@@ -0,0 +1,158 @@
"""Stuff that differs in different Python versions and platform
|
||||
distributions."""
|
||||
from __future__ import absolute_import, division
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pip._vendor.six import text_type
|
||||
|
||||
try:
|
||||
from logging.config import dictConfig as logging_dictConfig
|
||||
except ImportError:
|
||||
from pip.compat.dictconfig import dictConfig as logging_dictConfig
|
||||
|
||||
try:
|
||||
import ipaddress
|
||||
except ImportError:
|
||||
try:
|
||||
from pip._vendor import ipaddress
|
||||
except ImportError:
|
||||
import ipaddr as ipaddress
|
||||
ipaddress.ip_address = ipaddress.IPAddress
|
||||
ipaddress.ip_network = ipaddress.IPNetwork
|
||||
|
||||
|
||||
try:
|
||||
import sysconfig
|
||||
|
||||
def get_stdlib():
|
||||
paths = [
|
||||
sysconfig.get_path("stdlib"),
|
||||
sysconfig.get_path("platstdlib"),
|
||||
]
|
||||
return set(filter(bool, paths))
|
||||
except ImportError:
|
||||
from distutils import sysconfig
|
||||
|
||||
def get_stdlib():
|
||||
paths = [
|
||||
sysconfig.get_python_lib(standard_lib=True),
|
||||
sysconfig.get_python_lib(standard_lib=True, plat_specific=True),
|
||||
]
|
||||
return set(filter(bool, paths))
|
||||
|
||||
|
||||
__all__ = [
|
||||
"logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str",
|
||||
"native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile"
|
||||
]
|
||||
|
||||
|
||||
if sys.version_info >= (3, 4):
|
||||
uses_pycache = True
|
||||
from importlib.util import cache_from_source
|
||||
else:
|
||||
import imp
|
||||
uses_pycache = hasattr(imp, 'cache_from_source')
|
||||
if uses_pycache:
|
||||
cache_from_source = imp.cache_from_source
|
||||
else:
|
||||
cache_from_source = None
|
||||
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
def console_to_str(s):
|
||||
try:
|
||||
return s.decode(sys.__stdout__.encoding)
|
||||
except UnicodeDecodeError:
|
||||
return s.decode('utf_8')
|
||||
|
||||
def native_str(s, replace=False):
|
||||
if isinstance(s, bytes):
|
||||
return s.decode('utf-8', 'replace' if replace else 'strict')
|
||||
return s
|
||||
|
||||
else:
|
||||
def console_to_str(s):
|
||||
return s
|
||||
|
||||
def native_str(s, replace=False):
|
||||
# Replace is ignored -- unicode to UTF-8 can't fail
|
||||
if isinstance(s, text_type):
|
||||
return s.encode('utf-8')
|
||||
return s
|
||||
|
||||
|
||||
def total_seconds(td):
|
||||
if hasattr(td, "total_seconds"):
|
||||
return td.total_seconds()
|
||||
else:
|
||||
val = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6
|
||||
return val / 10 ** 6
|
||||
|
||||
|
||||
def get_path_uid(path):
|
||||
"""
|
||||
Return path's uid.
|
||||
|
||||
Does not follow symlinks:
|
||||
https://github.com/pypa/pip/pull/935#discussion_r5307003
|
||||
|
||||
Placed this function in compat due to differences on AIX and
|
||||
Jython, that should eventually go away.
|
||||
|
||||
:raises OSError: When path is a symlink or can't be read.
|
||||
"""
|
||||
if hasattr(os, 'O_NOFOLLOW'):
|
||||
fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
|
||||
file_uid = os.fstat(fd).st_uid
|
||||
os.close(fd)
|
||||
else: # AIX and Jython
|
||||
# WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW
|
||||
if not os.path.islink(path):
|
||||
# older versions of Jython don't have `os.fstat`
|
||||
file_uid = os.stat(path).st_uid
|
||||
else:
|
||||
# raise OSError for parity with os.O_NOFOLLOW above
|
||||
raise OSError(
|
||||
"%s is a symlink; Will not return uid for symlinks" % path
|
||||
)
|
||||
return file_uid


def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    expanded = os.path.expanduser(path)
    if path.startswith('~/') and expanded.startswith('//'):
        expanded = expanded[1:]
    return expanded
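

# Editor's note: illustrative sketch, not part of pip, POSIX-only, and it
# mutates HOME for demonstration. It shows the bug the wrapper above works
# around: with HOME set to "/", os.path.expanduser('~/x') returns '//x'.
def _example_expanduser():
    os.environ['HOME'] = '/'
    assert os.path.expanduser('~/x') == '//x'  # the upstream bug
    assert expanduser('~/x') == '/x'           # the wrapper's fix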


# packages in the stdlib that may have installation metadata, but should not
# be considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but we fear platform variation
# may make this ineffective, so it is hard-coded
stdlib_pkgs = ['python', 'wsgiref']
if sys.version_info >= (2, 7):
    stdlib_pkgs.extend(['argparse'])


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))


def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)
    else:
        path1 = os.path.normcase(os.path.abspath(file1))
        path2 = os.path.normcase(os.path.abspath(file2))
        return path1 == path2
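

# Editor's note: hypothetical sketch, not part of pip. It demonstrates that
# samefile() treats different spellings of one path as equal; the temporary
# file name is made up.
def _example_samefile(tmp_name='example.txt'):
    open(tmp_name, 'w').close()
    assert samefile(tmp_name, os.path.join('.', tmp_name))
    os.remove(tmp_name)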
565
Shared/lib/python3.4/site-packages/pip/compat/dictconfig.py
Normal file
@@ -0,0 +1,565 @@
# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import

import logging.handlers
import re
import sys
import types

from pip._vendor import six

# flake8: noqa

IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    m = IDENTIFIER.match(s)
    if not m:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True

#
# This function is defined in logging only in recent versions of Python
#
try:
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv

# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.


class ConvertingDict(dict):
    """A converting dictionary wrapper."""

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class ConvertingList(list):
    """A converting list wrapper."""
    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result


class ConvertingTuple(tuple):
    """A converting tuple wrapper."""
    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and \
                isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
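

# Editor's note: illustrative sketch, not part of this module. convert()
# recognizes the two prefixes registered in value_converters: ext:// resolves
# a dotted import path, and cfg:// indexes into the config being processed.
def _example_convert():
    c = BaseConfigurator({'handlers': {'console': {'level': 'INFO'}}})
    assert c.convert('ext://sys.stderr') is sys.stderr
    assert c.convert('cfg://handlers.console.level') == 'INFO'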


class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.
    """

    def configure(self):
        """Do the configuration."""

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and \
                                (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                # (e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and \
                    'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and \
                    'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and \
                    'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            # (e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)


dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
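

# Editor's note: illustrative usage sketch, not part of this module. A minimal
# version-1 config of the shape dictConfig() accepts (mirroring the stdlib
# logging.config.dictConfig schema):
def _example_dictConfig():
    dictConfig({
        'version': 1,
        'formatters': {'plain': {'format': '%(levelname)s %(message)s'}},
        'handlers': {'console': {'class': 'logging.StreamHandler',
                                 'formatter': 'plain'}},
        'root': {'level': 'INFO', 'handlers': ['console']},
    })
    logging.getLogger(__name__).info('configured')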
894
Shared/lib/python3.4/site-packages/pip/download.py
Normal file
@@ -0,0 +1,894 @@
from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile

try:
    import ssl  # noqa
    HAS_TLS = True
except ImportError:
    HAS_TLS = False

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

import pip

from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
                       backup_dir, ask_path_exists, unpack_file,
                       ARCHIVE_EXTENSIONS, consume, call_subprocess)
from pip.utils.filesystem import check_path_owner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.packages import urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client


__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        distro = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], platform.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], platform.libc_ver()),
        ))
        if libc:
            distro["libc"] = libc
        if distro:
            data["distro"] = distro

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    # Python 2.6 doesn't have ssl.OPENSSL_VERSION.
    if HAS_TLS and sys.version_info[:2] > (2, 6):
        data["openssl_version"] = ssl.OPENSSL_VERSION

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
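

# Editor's note: illustrative sketch, not part of pip. The returned string is
# "pip/<version>" followed by the JSON blob built above, so a quick sanity
# check looks like this:
def _example_user_agent():
    ua = user_agent()
    assert ua.startswith("pip/%s " % pip.__version__)
    assert '"implementation"' in ua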


class MultiDomainBasicAuth(AuthBase):

    def __init__(self, prompting=True):
        self.prompting = prompting
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses; anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user, so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                return userinfo.split(":", 1)
            return userinfo, None
        return None, None
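

# Editor's note: hypothetical sketch, not part of pip; the host name is made
# up. Note that a userinfo with a password comes back as a two-item list
# (from str.split), while the other cases return tuples:
def _example_parse_credentials():
    auth = MultiDomainBasicAuth()
    assert auth.parse_credentials('user:pw@pypi.example.org') == ['user', 'pw']
    assert auth.parse_credentials('user@pypi.example.org') == ('user', None)
    assert auth.parse_credentials('pypi.example.org') == (None, None)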


class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
        # we will check the parent directory until we find one that does
        # exist. If it is not owned by the user executing pip then we will
        # disable the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error: if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error: if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error: if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None


class PipSession(requests.Session):

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error, so we'll go ahead and
            # retry it.
            status_forcelist=[503],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{0}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
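

# Editor's note: illustrative usage sketch; PipSession is internal to pip and
# the cache path and URL below are hypothetical. It layers retries, optional
# caching, and the file:// adapter on top of requests.Session:
def _example_pip_session():
    session = PipSession(retries=3, cache='/tmp/pip-http-cache')
    session.timeout = 15  # default applied by request() above
    return session.get('https://pypi.python.org/simple/')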


def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode."""
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()

            if six.PY3:
                return resp.url, resp.text
            else:
                return resp.url, resp.content
    try:
        with open(url) as f:
            content = f.read()
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url
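

# Editor's note: illustrative sketch, not part of pip; the path is made up.
# On POSIX, path_to_url() and url_to_path() invert each other for absolute
# paths:
def _example_path_url_round_trip():
    path = os.path.abspath('example-1.0.tar.gz')
    url = path_to_url(path)  # e.g. file:///home/user/example-1.0.tar.gz
    assert url_to_path(url) == path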


def is_archive_file(name):
    """Return True if `name` is considered an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False


def unpack_vcs_link(link, location):
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(resp, link, content_file, hashes):
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
            progress_indicator = DownloadProgressBar(max=total_length).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(progress_indicator(resp_read(4096),
                                                          4096))
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
            display_path(download_location), ('i', 'w', 'b'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(link,
                                                     session,
                                                     temp_dir,
                                                     hashes)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)

    if not already_downloaded_path:
        os.unlink(from_path)
    rmtree(temp_dir)


def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """
    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None):
    """Unpack link.
    If link is a VCS link:
        if only_download, export into download_dir and ignore location
        else unpack into location
    for other types of link:
        - unpack into location
        - if download_dir, copy the file into download_dir
        - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes
        )
    if only_download:
        write_delete_marker_file(location)


def _download_http_url(link, session, temp_dir, hashes):
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes)
    return file_path, content_type


def _check_download_dir(link, download_dir, hashes):
    """Check download_dir for a previously downloaded file with a correct
    hash.

    If a correct file is found, return its path; otherwise return None.
    """
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if hashes:
            try:
                hashes.check_against_path(download_path)
            except HashMismatch:
                logger.warning(
                    'Previously-downloaded file %s has bad hash. '
                    'Re-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None
236
Shared/lib/python3.4/site-packages/pip/exceptions.py
Normal file
@@ -0,0 +1,236 @@
"""Exceptions used throughout package"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from itertools import chain, groupby, repeat
|
||||
|
||||
from pip._vendor.six import iteritems
|
||||
|
||||
|
||||
class PipError(Exception):
|
||||
"""Base pip exception"""
|
||||
|
||||
|
||||
class InstallationError(PipError):
|
||||
"""General exception during installation"""
|
||||
|
||||
|
||||
class UninstallationError(PipError):
|
||||
"""General exception during uninstallation"""
|
||||
|
||||
|
||||
class DistributionNotFound(InstallationError):
|
||||
"""Raised when a distribution cannot be found to satisfy a requirement"""
|
||||
|
||||
|
||||
class RequirementsFileParseError(InstallationError):
|
||||
"""Raised when a general error occurs parsing a requirements file line."""
|
||||
|
||||
|
||||
class BestVersionAlreadyInstalled(PipError):
|
||||
"""Raised when the most up-to-date version of a package is already
|
||||
installed."""
|
||||
|
||||
|
||||
class BadCommand(PipError):
|
||||
"""Raised when virtualenv or a command is not found"""
|
||||
|
||||
|
||||
class CommandError(PipError):
|
||||
"""Raised when there is an error in command-line arguments"""
|
||||
|
||||
|
||||
class PreviousBuildDirError(PipError):
|
||||
"""Raised when there's a previous conflicting build directory"""
|
||||
|
||||
|
||||
class InvalidWheelFilename(InstallationError):
|
||||
"""Invalid wheel filename."""
|
||||
|
||||
|
||||
class UnsupportedWheel(InstallationError):
|
||||
"""Unsupported wheel."""
|
||||
|
||||
|
||||
class HashErrors(InstallationError):
|
||||
"""Multiple HashError instances rolled into one for reporting"""
|
||||
|
||||
def __init__(self):
|
||||
self.errors = []
|
||||
|
||||
def append(self, error):
|
||||
self.errors.append(error)
|
||||
|
||||
def __str__(self):
|
||||
lines = []
|
||||
self.errors.sort(key=lambda e: e.order)
|
||||
for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
|
||||
lines.append(cls.head)
|
||||
lines.extend(e.body() for e in errors_of_cls)
|
||||
if lines:
|
||||
return '\n'.join(lines)
|
||||
|
||||
def __nonzero__(self):
|
||||
return bool(self.errors)
|
||||
|
||||
def __bool__(self):
|
||||
return self.__nonzero__()
|
||||
|
||||
|
||||
class HashError(InstallationError):
|
||||
"""
|
||||
A failure to verify a package against known-good hashes
|
||||
|
||||
:cvar order: An int sorting hash exception classes by difficulty of
|
||||
recovery (lower being harder), so the user doesn't bother fretting
|
||||
about unpinned packages when he has deeper issues, like VCS
|
||||
dependencies, to deal with. Also keeps error reports in a
|
||||
deterministic order.
|
||||
:cvar head: A section heading for display above potentially many
|
||||
exceptions of this kind
|
||||
:ivar req: The InstallRequirement that triggered this error. This is
|
||||
pasted on after the exception is instantiated, because it's not
|
||||
typically available earlier.
|
||||
|
||||
"""
|
||||
req = None
|
||||
head = ''
|
||||
|
||||
def body(self):
|
||||
"""Return a summary of me for display under the heading.
|
||||
|
||||
This default implementation simply prints a description of the
|
||||
triggering requirement.
|
||||
|
||||
:param req: The InstallRequirement that provoked this error, with
|
||||
populate_link() having already been called
|
||||
|
||||
"""
|
||||
return ' %s' % self._requirement_name()
|
||||
|
||||
def __str__(self):
|
||||
return '%s\n%s' % (self.head, self.body())
|
||||
|
||||
def _requirement_name(self):
|
||||
"""Return a description of the requirement that triggered me.
|
||||
|
||||
This default implementation returns long description of the req, with
|
||||
line numbers
|
||||
|
||||
"""
|
||||
return str(self.req) if self.req else 'unknown package'
|
||||
|
||||
|
||||
class VcsHashUnsupported(HashError):
|
||||
"""A hash was provided for a version-control-system-based requirement, but
|
||||
we don't have a method for hashing those."""
|
||||
|
||||
order = 0
|
||||
head = ("Can't verify hashes for these requirements because we don't "
|
||||
"have a way to hash version control repositories:")
|
||||
|
||||
|
||||


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory; we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        from pip.utils.hashes import FAVORITE_HASH  # Dodge circular import.

        package_name = (self.req.req if self.req and
                        # In case someone feeds something
                        # downright stupid to
                        # InstallRequirement's constructor:
                        getattr(self.req, 'req', None)
                        else 'unknown package')
        return '    %s --hash=%s:%s' % (package_name,
                                        FAVORITE_HASH,
                                        self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            prefix = '    or'
        return '\n'.join(lines)
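Taken together, the HashError subclasses above are designed to be collected into one HashErrors container and raised once, so the user sees a single grouped, deterministically ordered report. A minimal sketch of that pattern (the digests are hypothetical; assumes the pip 8.x layout vendored in this commit):

import hashlib

from pip.exceptions import HashErrors, HashMismatch

errors = HashErrors()
# A hashlib object stands in for the "got" hash; the report rendering
# calls .hexdigest() on it.
got = hashlib.sha256(b'downloaded archive bytes')
errors.append(HashMismatch(allowed={'sha256': ['0' * 64]},  # hypothetical pin
                           gots={'sha256': got}))
if errors:         # __bool__: any errors collected?
    print(errors)  # grouped under HashMismatch.head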
1033
Shared/lib/python3.4/site-packages/pip/index.py
Normal file
File diff suppressed because it is too large
182
Shared/lib/python3.4/site-packages/pip/locations.py
Normal file
@@ -0,0 +1,182 @@
"""Locations where we look for configs, install stuff, etc"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import site
|
||||
import sys
|
||||
|
||||
from distutils import sysconfig
|
||||
from distutils.command.install import install, SCHEME_KEYS # noqa
|
||||
|
||||
from pip.compat import WINDOWS, expanduser
|
||||
from pip.utils import appdirs
|
||||
|
||||
|
||||
# Application Directories
|
||||
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
|
||||
|
||||
|
||||
DELETE_MARKER_MESSAGE = '''\
|
||||
This file is placed here by pip to indicate the source was put
|
||||
here by pip.
|
||||
|
||||
Once this package is successfully installed this source code will be
|
||||
deleted (unless you remove this file).
|
||||
'''
|
||||
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|
||||
|
||||
|
||||
def write_delete_marker_file(directory):
|
||||
"""
|
||||
Write the pip delete marker file into this directory.
|
||||
"""
|
||||
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
|
||||
with open(filepath, 'w') as marker_fp:
|
||||
marker_fp.write(DELETE_MARKER_MESSAGE)
|
||||
|
||||
|
||||
def running_under_virtualenv():
|
||||
"""
|
||||
Return True if we're running inside a virtualenv, False otherwise.
|
||||
|
||||
"""
|
||||
if hasattr(sys, 'real_prefix'):
|
||||
return True
|
||||
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def virtualenv_no_global():
|
||||
"""
|
||||
Return True if in a venv and no system site packages.
|
||||
"""
|
||||
# this mirrors the logic in virtualenv.py for locating the
|
||||
# no-global-site-packages.txt file
|
||||
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
|
||||
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
|
||||
if running_under_virtualenv() and os.path.isfile(no_global_file):
|
||||
return True
|
||||
|
||||
|
||||
if running_under_virtualenv():
|
||||
src_prefix = os.path.join(sys.prefix, 'src')
|
||||
else:
|
||||
# FIXME: keep src in cwd for now (it is not a temporary folder)
|
||||
try:
|
||||
src_prefix = os.path.join(os.getcwd(), 'src')
|
||||
except OSError:
|
||||
# In case the current working directory has been renamed or deleted
|
||||
sys.exit(
|
||||
"The folder you are executing pip from can no longer be found."
|
||||
)
|
||||
|
||||
# under Mac OS X + virtualenv sys.prefix is not properly resolved
|
||||
# it is something like /path/to/python/bin/..
|
||||
# Note: using realpath due to tmp dirs on OSX being symlinks
|
||||
src_prefix = os.path.abspath(src_prefix)
|
||||
|
||||
# FIXME doesn't account for venv linked to global site-packages
|
||||
|
||||
site_packages = sysconfig.get_python_lib()
|
||||
user_site = site.USER_SITE
|
||||
user_dir = expanduser('~')
|
||||
if WINDOWS:
|
||||
bin_py = os.path.join(sys.prefix, 'Scripts')
|
||||
bin_user = os.path.join(user_site, 'Scripts')
|
||||
# buildout uses 'bin' on Windows too?
|
||||
if not os.path.exists(bin_py):
|
||||
bin_py = os.path.join(sys.prefix, 'bin')
|
||||
bin_user = os.path.join(user_site, 'bin')
|
||||
|
||||
config_basename = 'pip.ini'
|
||||
|
||||
legacy_storage_dir = os.path.join(user_dir, 'pip')
|
||||
legacy_config_file = os.path.join(
|
||||
legacy_storage_dir,
|
||||
config_basename,
|
||||
)
|
||||
else:
|
||||
bin_py = os.path.join(sys.prefix, 'bin')
|
||||
bin_user = os.path.join(user_site, 'bin')
|
||||
|
||||
config_basename = 'pip.conf'
|
||||
|
||||
legacy_storage_dir = os.path.join(user_dir, '.pip')
|
||||
legacy_config_file = os.path.join(
|
||||
legacy_storage_dir,
|
||||
config_basename,
|
||||
)
|
||||
|
||||
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
|
||||
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
|
||||
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
|
||||
bin_py = '/usr/local/bin'
|
||||
|
||||
site_config_files = [
|
||||
os.path.join(path, config_basename)
|
||||
for path in appdirs.site_config_dirs('pip')
|
||||
]
|
||||
|
||||
|
||||
def distutils_scheme(dist_name, user=False, home=None, root=None,
|
||||
isolated=False, prefix=None):
|
||||
"""
|
||||
Return a distutils install scheme
|
||||
"""
|
||||
from distutils.dist import Distribution
|
||||
|
||||
scheme = {}
|
||||
|
||||
if isolated:
|
||||
extra_dist_args = {"script_args": ["--no-user-cfg"]}
|
||||
else:
|
||||
extra_dist_args = {}
|
||||
dist_args = {'name': dist_name}
|
||||
dist_args.update(extra_dist_args)
|
||||
|
||||
d = Distribution(dist_args)
|
||||
d.parse_config_files()
|
||||
i = d.get_command_obj('install', create=True)
|
||||
# NOTE: setting user or home has the side-effect of creating the home dir
|
||||
# or user base for installations during finalize_options()
|
||||
# ideally, we'd prefer a scheme class that has no side-effects.
|
||||
assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
|
||||
i.user = user or i.user
|
||||
if user:
|
||||
i.prefix = ""
|
||||
i.prefix = prefix or i.prefix
|
||||
i.home = home or i.home
|
||||
i.root = root or i.root
|
||||
i.finalize_options()
|
||||
for key in SCHEME_KEYS:
|
||||
scheme[key] = getattr(i, 'install_' + key)
|
||||
|
||||
# install_lib specified in setup.cfg should install *everything*
|
||||
# into there (i.e. it takes precedence over both purelib and
|
||||
# platlib). Note, i.install_lib is *always* set after
|
||||
# finalize_options(); we only want to override here if the user
|
||||
# has explicitly requested it hence going back to the config
|
||||
if 'install_lib' in d.get_option_dict('install'):
|
||||
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
|
||||
|
||||
if running_under_virtualenv():
|
||||
scheme['headers'] = os.path.join(
|
||||
sys.prefix,
|
||||
'include',
|
||||
'site',
|
||||
'python' + sys.version[:3],
|
||||
dist_name,
|
||||
)
|
||||
|
||||
if root is not None:
|
||||
path_no_drive = os.path.splitdrive(
|
||||
os.path.abspath(scheme["headers"]))[1]
|
||||
scheme["headers"] = os.path.join(
|
||||
root,
|
||||
path_no_drive[1:],
|
||||
)
|
||||
|
||||
return scheme
|
||||
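The distutils_scheme() helper at the end of this file is what the rest of pip calls to decide where a project's files land. A quick sketch of inspecting its result (the project name is hypothetical; the actual paths depend on interpreter and platform):

from pip.locations import distutils_scheme, running_under_virtualenv

print(running_under_virtualenv())
scheme = distutils_scheme('example-pkg')  # hypothetical project name
for key in sorted(scheme):
    # keys come from distutils SCHEME_KEYS: data, headers, platlib,
    # purelib, scripts
    print(key, '->', scheme[key])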
4
Shared/lib/python3.4/site-packages/pip/models/__init__.py
Normal file
@@ -0,0 +1,4 @@
from pip.models.index import Index, PyPI


__all__ = ["Index", "PyPI"]
16
Shared/lib/python3.4/site-packages/pip/models/index.py
Normal file
@@ -0,0 +1,16 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class Index(object):
    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')
        self.pip_json_url = self.url_to_path('pypi/pip/json')

    def url_to_path(self, path):
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.python.org/')
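Index is a tiny value object, so its behaviour is easiest to see through the PyPI singleton it defines. A short sketch (the mirror URL is hypothetical):

from pip.models import Index, PyPI

print(PyPI.netloc)      # pypi.python.org
print(PyPI.simple_url)  # https://pypi.python.org/simple

# Pointing at a private mirror works the same way (hypothetical URL):
mirror = Index('https://mirror.example.org/')
print(mirror.url_to_path('simple'))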
112
Shared/lib/python3.4/site-packages/pip/operations/freeze.py
Normal file
@@ -0,0 +1,112 @@
from __future__ import absolute_import

import logging
import re

import pip
from pip.compat import stdlib_pkgs
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources


logger = logging.getLogger(__name__)

# packages to exclude from freeze output
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']


def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None):
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex)

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=freeze_excludes,
                                            user_only=user_only):
        req = pip.FrozenRequirement.from_dist(
            dist,
            dependency_links
        )
        installations[req.name] = req

    if requirement:
        with open(requirement) as req_file:
            for line in req_file:
                if (not line.strip() or
                        line.strip().startswith('#') or
                        (skip_match and skip_match.search(line)) or
                        line.startswith((
                            '-r', '--requirement',
                            '-Z', '--always-unzip',
                            '-f', '--find-links',
                            '-i', '--index-url',
                            '--extra-index-url'))):
                    yield line.rstrip()
                    continue

                if line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(
                        line,
                        default_vcs=default_vcs,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )
                else:
                    line_req = InstallRequirement.from_line(
                        line,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )

                if not line_req.name:
                    logger.info(
                        "Skipping line because it's not clear what it "
                        "would install: %s",
                        line.strip(),
                    )
                    logger.info(
                        "  (add #egg=PackageName to the URL to avoid"
                        " this warning)"
                    )
                elif line_req.name not in installations:
                    logger.warning(
                        "Requirement file contains %s, but that package is"
                        " not installed",
                        line.strip(),
                    )
                else:
                    yield str(installations[line_req.name]).rstrip()
                    del installations[line_req.name]

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        yield str(installation).rstrip()
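freeze() is a generator of requirement lines rather than a function that prints. A minimal sketch of driving it directly (the output lines depend entirely on what is installed):

from pip.operations.freeze import freeze

for line in freeze(local_only=True):
    print(line)  # e.g. "requests==2.9.1"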
222
Shared/lib/python3.4/site-packages/pip/pep425tags.py
Normal file
@@ -0,0 +1,222 @@
"""Generate and work with PEP 425 Compatibility Tags."""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
import platform
|
||||
import logging
|
||||
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # pragma nocover
|
||||
# Python < 2.7
|
||||
import distutils.sysconfig as sysconfig
|
||||
import distutils.util
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
|
||||
|
||||
|
||||
def get_config_var(var):
|
||||
try:
|
||||
return sysconfig.get_config_var(var)
|
||||
except IOError as e: # Issue #1074
|
||||
warnings.warn("{0}".format(e), RuntimeWarning)
|
||||
return None
|
||||
|
||||
|
||||
def get_abbr_impl():
|
||||
"""Return abbreviated implementation name."""
|
||||
if hasattr(sys, 'pypy_version_info'):
|
||||
pyimpl = 'pp'
|
||||
elif sys.platform.startswith('java'):
|
||||
pyimpl = 'jy'
|
||||
elif sys.platform == 'cli':
|
||||
pyimpl = 'ip'
|
||||
else:
|
||||
pyimpl = 'cp'
|
||||
return pyimpl
|
||||
|
||||
|
||||
def get_impl_ver():
|
||||
"""Return implementation version."""
|
||||
impl_ver = get_config_var("py_version_nodot")
|
||||
if not impl_ver or get_abbr_impl() == 'pp':
|
||||
impl_ver = ''.join(map(str, get_impl_version_info()))
|
||||
return impl_ver
|
||||
|
||||
|
||||
def get_impl_version_info():
|
||||
"""Return sys.version_info-like tuple for use in decrementing the minor
|
||||
version."""
|
||||
if get_abbr_impl() == 'pp':
|
||||
# as per https://github.com/pypa/pip/issues/2882
|
||||
return (sys.version_info[0], sys.pypy_version_info.major,
|
||||
sys.pypy_version_info.minor)
|
||||
else:
|
||||
return sys.version_info[0], sys.version_info[1]
|
||||
|
||||
|
||||
def get_impl_tag():
|
||||
"""
|
||||
Returns the Tag for this specific implementation.
|
||||
"""
|
||||
return "{0}{1}".format(get_abbr_impl(), get_impl_ver())
|
||||
|
||||
|
||||
def get_flag(var, fallback, expected=True, warn=True):
|
||||
"""Use a fallback method for determining SOABI flags if the needed config
|
||||
var is unset or unavailable."""
|
||||
val = get_config_var(var)
|
||||
if val is None:
|
||||
if warn:
|
||||
logger.debug("Config variable '%s' is unset, Python ABI tag may "
|
||||
"be incorrect", var)
|
||||
return fallback()
|
||||
return val == expected
|
||||
|
||||
|
||||
def get_abi_tag():
|
||||
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
|
||||
(CPython 2, PyPy)."""
|
||||
soabi = get_config_var('SOABI')
|
||||
impl = get_abbr_impl()
|
||||
if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
|
||||
d = ''
|
||||
m = ''
|
||||
u = ''
|
||||
if get_flag('Py_DEBUG',
|
||||
lambda: hasattr(sys, 'gettotalrefcount'),
|
||||
warn=(impl == 'cp')):
|
||||
d = 'd'
|
||||
if get_flag('WITH_PYMALLOC',
|
||||
lambda: impl == 'cp',
|
||||
warn=(impl == 'cp')):
|
||||
m = 'm'
|
||||
if get_flag('Py_UNICODE_SIZE',
|
||||
lambda: sys.maxunicode == 0x10ffff,
|
||||
expected=4,
|
||||
warn=(impl == 'cp' and
|
||||
sys.version_info < (3, 3))) \
|
||||
and sys.version_info < (3, 3):
|
||||
u = 'u'
|
||||
abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
|
||||
elif soabi and soabi.startswith('cpython-'):
|
||||
abi = 'cp' + soabi.split('-')[1]
|
||||
elif soabi:
|
||||
abi = soabi.replace('.', '_').replace('-', '_')
|
||||
else:
|
||||
abi = None
|
||||
return abi
|
||||


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')
        return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine)
    # XXX remove distutils dependency
    return distutils.util.get_platform().replace('.', '_').replace('-', '_')


def get_supported(versions=None, noarch=False):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = get_abbr_impl()

    abis = []

    abi = get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = get_platform()
        if sys.platform == 'darwin':
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                actual_arches = [actual_arch]
                if actual_arch in ('i386', 'ppc'):
                    actual_arches.append('fat')
                if actual_arch in ('i386', 'x86_64'):
                    actual_arches.append('intel')
                if actual_arch in ('ppc64', 'x86_64'):
                    actual_arches.append('fat64')
                if actual_arch in ('i386', 'ppc', 'x86_64'):
                    actual_arches.append('fat32')
                if actual_arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                    actual_arches.append('universal')
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in actual_arches:
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # Has binaries, does not use the Python API:
        supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported

supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)

implementation_tag = get_impl_tag()
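The module computes its tag list once at import time, so the quickest way to see the result is to inspect the module-level values. A sketch (the printed tags are illustrative for a CPython 3.4 Linux build, not guaranteed):

from pip import pep425tags

# Most specific tag first, e.g. ('cp34', 'cp34m', 'linux_x86_64')
print(pep425tags.supported_tags[0])
print(pep425tags.implementation_tag)  # e.g. 'cp34'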
10
Shared/lib/python3.4/site-packages/pip/req/__init__.py
Normal file
@@ -0,0 +1,10 @@
from __future__ import absolute_import

from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements
from .req_file import parse_requirements

__all__ = [
    "RequirementSet", "Requirements", "InstallRequirement",
    "parse_requirements",
]
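The package merely re-exports its core types, so callers can stay on the short import path:

from pip.req import InstallRequirement, RequirementSet, parse_requirements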
338
Shared/lib/python3.4/site-packages/pip/req/req_file.py
Normal file
@@ -0,0 +1,338 @@
"""
|
||||
Requirements file parsing
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import optparse
|
||||
import warnings
|
||||
|
||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||
from pip._vendor.six.moves import filterfalse
|
||||
|
||||
import pip
|
||||
from pip.download import get_file_content
|
||||
from pip.req.req_install import InstallRequirement
|
||||
from pip.exceptions import (RequirementsFileParseError)
|
||||
from pip.utils.deprecation import RemovedInPip10Warning
|
||||
from pip import cmdoptions
|
||||
|
||||
__all__ = ['parse_requirements']
|
||||
|
||||
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
|
||||
COMMENT_RE = re.compile(r'(^|\s)+#.*$')
|
||||
|
||||
SUPPORTED_OPTIONS = [
|
||||
cmdoptions.constraints,
|
||||
cmdoptions.editable,
|
||||
cmdoptions.requirements,
|
||||
cmdoptions.no_index,
|
||||
cmdoptions.index_url,
|
||||
cmdoptions.find_links,
|
||||
cmdoptions.extra_index_url,
|
||||
cmdoptions.allow_external,
|
||||
cmdoptions.allow_all_external,
|
||||
cmdoptions.no_allow_external,
|
||||
cmdoptions.allow_unsafe,
|
||||
cmdoptions.no_allow_unsafe,
|
||||
cmdoptions.use_wheel,
|
||||
cmdoptions.no_use_wheel,
|
||||
cmdoptions.always_unzip,
|
||||
cmdoptions.no_binary,
|
||||
cmdoptions.only_binary,
|
||||
cmdoptions.pre,
|
||||
cmdoptions.process_dependency_links,
|
||||
cmdoptions.trusted_host,
|
||||
cmdoptions.require_hashes,
|
||||
]
|
||||
|
||||
# options to be passed to requirements
|
||||
SUPPORTED_OPTIONS_REQ = [
|
||||
cmdoptions.install_options,
|
||||
cmdoptions.global_options,
|
||||
cmdoptions.hash,
|
||||
]
|
||||
|
||||
# the 'dest' string values
|
||||
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
|
||||
|
||||
|
||||
def parse_requirements(filename, finder=None, comes_from=None, options=None,
|
||||
session=None, constraint=False, wheel_cache=None):
|
||||
"""Parse a requirements file and yield InstallRequirement instances.
|
||||
|
||||
:param filename: Path or url of requirements file.
|
||||
:param finder: Instance of pip.index.PackageFinder.
|
||||
:param comes_from: Origin description of requirements.
|
||||
:param options: cli options.
|
||||
:param session: Instance of pip.download.PipSession.
|
||||
:param constraint: If true, parsing a constraint file rather than
|
||||
requirements file.
|
||||
:param wheel_cache: Instance of pip.wheel.WheelCache
|
||||
"""
|
||||
if session is None:
|
||||
raise TypeError(
|
||||
"parse_requirements() missing 1 required keyword argument: "
|
||||
"'session'"
|
||||
)
|
||||
|
||||
_, content = get_file_content(
|
||||
filename, comes_from=comes_from, session=session
|
||||
)
|
||||
|
||||
lines_enum = preprocess(content, options)
|
||||
|
||||
for line_number, line in lines_enum:
|
||||
req_iter = process_line(line, filename, line_number, finder,
|
||||
comes_from, options, session, wheel_cache,
|
||||
constraint=constraint)
|
||||
for req in req_iter:
|
||||
yield req
|
||||
|
||||
|
||||
def preprocess(content, options):
|
||||
"""Split, filter, and join lines, and return a line iterator
|
||||
|
||||
:param content: the content of the requirements file
|
||||
:param options: cli options
|
||||
"""
|
||||
lines_enum = enumerate(content.splitlines(), start=1)
|
||||
lines_enum = join_lines(lines_enum)
|
||||
lines_enum = ignore_comments(lines_enum)
|
||||
lines_enum = skip_regex(lines_enum, options)
|
||||
return lines_enum
|
||||
|
||||
|
||||


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)


def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser


def join_lines(lines_enum):
    """Joins a line ending in '\\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(
            lambda e: pattern.search(e[1]),
            lines_enum)
    return lines_enum
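parse_requirements() refuses to run without a session (see the TypeError guard above), so the minimal driver looks like this sketch (the file path is hypothetical; assumes pip 8.x's pip.download.PipSession):

from pip.download import PipSession
from pip.req.req_file import parse_requirements

session = PipSession()
for req in parse_requirements('requirements.txt', session=session):
    print(req.name, req.editable)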
1217
Shared/lib/python3.4/site-packages/pip/req/req_install.py
Normal file
File diff suppressed because it is too large
745
Shared/lib/python3.4/site-packages/pip/req/req_set.py
Normal file
@@ -0,0 +1,745 @@
from __future__ import absolute_import

from collections import defaultdict
from itertools import chain
import logging
import os

from pip._vendor import pkg_resources
from pip._vendor import requests

from pip.compat import expanduser
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
                          unpack_url)
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
                            DistributionNotFound, PreviousBuildDirError,
                            HashError, HashErrors, HashUnpinned,
                            DirectoryUrlHashUnsupported, VcsHashUnsupported)
from pip.req.req_install import InstallRequirement
from pip.utils import (
    display_path, dist_in_usersite, ensure_dir, normalize_path)
from pip.utils.hashes import MissingHashes
from pip.utils.logging import indent_log
from pip.vcs import vcs


logger = logging.getLogger(__name__)


class Requirements(object):

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        return self._keys

    def values(self):
        return [self._dict[key] for key in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
        return 'Requirements({%s})' % ', '.join(values)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError(self.dist)


def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req_to_install.editable:
        return IsSDist(req_to_install)
    elif req_to_install.link and req_to_install.link.is_wheel:
        return IsWheel(req_to_install)
    else:
        return IsSDist(req_to_install)


class IsWheel(DistAbstraction):

    def dist(self, finder):
        return list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))[0]

    def prep_for_dist(self):
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self):
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()


class Installed(DistAbstraction):

    def dist(self, finder):
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        pass


class RequirementSet(object):

    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 ignore_installed=False, as_egg=False, target_dir=None,
                 ignore_dependencies=False, force_reinstall=False,
                 use_user_site=False, session=None, pycompile=True,
                 isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.successfully_downloaded = []
        self.successfully_installed = []
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers():
            logger.warning("Ignoring %s: markers %r don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                            install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result

    def has_requirement(self, project_name):
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    @property
    def is_download(self):
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def get_requirement(self, project_name):
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def uninstall(self, auto_confirm=False):
        for req in self.requirements.values():
            if req.constraint:
                continue
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()

    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = self.unnamed_requirements + self.requirements.values()
        require_hashes = (self.require_hashes or
                          any(req.has_hash_options for req in root_reqs))
        if require_hashes and self.as_egg:
            raise InstallationError(
                '--egg is not allowed with --require-hashes mode, since it '
                'delegates dependency resolution to setuptools and could thus '
                'result in installation of unhashed packages.')

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(self._prepare_file(
                    finder,
                    req,
                    require_hashes=require_hashes,
                    ignore_dependencies=self.ignore_dependencies))
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            skip_reason = 'satisfied (use --upgrade to upgrade)'
            if self.upgrade:
                best_installed = False
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # it's handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(req_to_install, self.upgrade)
                    except BestVersionAlreadyInstalled:
                        skip_reason = 'up-to-date'
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
            return skip_reason
        else:
            return None

    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement already %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder, self.upgrade, require_hashes)
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel' we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            more_reqs = []

            def add_req(subreq):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
|
||||
)
|
||||
more_reqs.extend(self.add_requirement(
|
||||
sub_install_req, req_to_install.name))
|
||||
|
||||
# We add req_to_install before its dependencies, so that we
|
||||
# can refer to it when adding dependencies.
|
||||
if not self.has_requirement(req_to_install.name):
|
||||
# 'unnamed' requirements will get added here
|
||||
self.add_requirement(req_to_install, None)
|
||||
|
||||
if not ignore_dependencies:
|
||||
if (req_to_install.extras):
|
||||
logger.debug(
|
||||
"Installing extra requirements: %r",
|
||||
','.join(req_to_install.extras),
|
||||
)
|
||||
missing_requested = sorted(
|
||||
set(req_to_install.extras) - set(dist.extras)
|
||||
)
|
||||
for missing in missing_requested:
|
||||
logger.warning(
|
||||
'%s does not provide the extra \'%s\'',
|
||||
dist, missing
|
||||
)
|
||||
|
||||
available_requested = sorted(
|
||||
set(dist.extras) & set(req_to_install.extras)
|
||||
)
|
||||
for subreq in dist.requires(available_requested):
|
||||
add_req(subreq)
|
||||
|
||||
# cleanup tmp src
|
||||
self.reqs_to_cleanup.append(req_to_install)
|
||||
|
||||
if not req_to_install.editable and not req_to_install.satisfied_by:
|
||||
# XXX: --no-install leads this to report 'Successfully
|
||||
# downloaded' for only non-editable reqs, even though we took
|
||||
# action on them.
|
||||
self.successfully_downloaded.append(req_to_install)
|
||||
|
||||
return more_reqs
|
||||
|
||||
def cleanup_files(self):
|
||||
"""Clean up files, remove builds."""
|
||||
logger.debug('Cleaning up...')
|
||||
with indent_log():
|
||||
for req in self.reqs_to_cleanup:
|
||||
req.remove_temporary_source()
|
||||
|
||||
def _to_install(self):
|
||||
"""Create the installation order.
|
||||
|
||||
The installation order is topological - requirements are installed
|
||||
before the requiring thing. We break cycles at an arbitrary point,
|
||||
and make no other guarantees.
|
||||
"""
|
||||
# The current implementation, which we may change at any point
|
||||
# installs the user specified things in the order given, except when
|
||||
# dependencies must come earlier to achieve topological order.
|
||||
order = []
|
||||
ordered_reqs = set()
|
||||
|
||||
def schedule(req):
|
||||
if req.satisfied_by or req in ordered_reqs:
|
||||
return
|
||||
if req.constraint:
|
||||
return
|
||||
ordered_reqs.add(req)
|
||||
for dep in self._dependencies[req]:
|
||||
schedule(dep)
|
||||
order.append(req)
|
||||
for install_req in self.requirements.values():
|
||||
schedule(install_req)
|
||||
return order
|
||||
|
||||
def install(self, install_options, global_options=(), *args, **kwargs):
|
||||
"""
|
||||
Install everything in this set (after having downloaded and unpacked
|
||||
the packages)
|
||||
"""
|
||||
to_install = self._to_install()
|
||||
|
||||
if to_install:
|
||||
logger.info(
|
||||
'Installing collected packages: %s',
|
||||
', '.join([req.name for req in to_install]),
|
||||
)
|
||||
|
||||
with indent_log():
|
||||
for requirement in to_install:
|
||||
if requirement.conflicts_with:
|
||||
logger.info(
|
||||
'Found existing installation: %s',
|
||||
requirement.conflicts_with,
|
||||
)
|
||||
with indent_log():
|
||||
requirement.uninstall(auto_confirm=True)
|
||||
try:
|
||||
requirement.install(
|
||||
install_options,
|
||||
global_options,
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
except:
|
||||
# if install did not succeed, rollback previous uninstall
|
||||
if (requirement.conflicts_with and not
|
||||
requirement.install_succeeded):
|
||||
requirement.rollback_uninstall()
|
||||
raise
|
||||
else:
|
||||
if (requirement.conflicts_with and
|
||||
requirement.install_succeeded):
|
||||
requirement.commit_uninstall()
|
||||
requirement.remove_temporary_source()
|
||||
|
||||
self.successfully_installed = to_install
|
||||
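
The `_to_install` scheduler above is a plain depth-first post-order walk: dependencies are appended before the thing that requires them, and the seen-set breaks cycles. A minimal standalone sketch of the same idea over an ordinary dict (the `deps` mapping and package names here are invented illustration data, not pip's internal structures):

# Standalone sketch of the post-order scheduling used by _to_install above.
# `deps` and the package names are hypothetical, not pip's data structures.
def install_order(requirements, deps):
    order = []
    seen = set()

    def schedule(req):
        if req in seen:          # also what breaks dependency cycles
            return
        seen.add(req)
        for dep in deps.get(req, ()):   # dependencies first ...
            schedule(dep)
        order.append(req)               # ... then the requiring thing

    for req in requirements:
        schedule(req)
    return order

deps = {'requests': ['urllib3', 'chardet'], 'chardet': []}
print(install_order(['requests'], deps))
# ['urllib3', 'chardet', 'requests']
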
195
Shared/lib/python3.4/site-packages/pip/req/req_uninstall.py
Normal file
@@ -0,0 +1,195 @@

from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
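
The `compact` method above prunes any path that lives under another path already kept, so the uninstall log lists each removed tree once. A standalone rendering of the same check (the example paths are made up; note the boundary test that keeps `/a/pathology` from collapsing into `/a/path`):

import os

# Same pruning rule as UninstallPathSet.compact: keep a path only if no
# shorter kept path is a *directory* prefix of it. Paths are illustrative.
def compact(paths):
    short_paths = set()
    for path in sorted(paths, key=len):
        if not any(
                path.startswith(short) and
                path[len(short.rstrip(os.path.sep))] == os.path.sep
                for short in short_paths):
            short_paths.add(path)
    return short_paths

print(sorted(compact({'/a/path', '/a/path/to/a/file.txt', '/a/pathology'})))
# ['/a/path', '/a/pathology']  -- the nested file collapses into /a/path
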
8
Shared/lib/python3.4/site-packages/pip/status_codes.py
Normal file
@@ -0,0 +1,8 @@

from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
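
These constants are the process exit codes pip's commands return. A minimal sketch of how a command might map an outcome onto them (the command body here is invented; in pip itself, `Command.run()` returns these values):

import sys

SUCCESS, ERROR = 0, 1  # mirrors pip/status_codes.py above

def main():
    # Hypothetical command body for illustration only.
    try:
        pass  # ... do the work ...
    except Exception:
        return ERROR
    return SUCCESS

if __name__ == '__main__':
    sys.exit(main())
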
852
Shared/lib/python3.4/site-packages/pip/utils/__init__.py
Normal file
@@ -0,0 +1,852 @@

from __future__ import absolute_import

from collections import deque
import contextlib
import errno
import locale
# we have a submodule named 'logging' which would shadow this if we used the
# regular name:
import logging as std_logging
import re
import os
import posixpath
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile

from pip.exceptions import InstallationError
from pip.compat import console_to_str, expanduser, stdlib_pkgs
from pip.locations import (
    site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
    write_delete_marker_file,
)
from pip._vendor import pkg_resources
from pip._vendor.six.moves import input
from pip._vendor.six import PY2
from pip._vendor.retrying import retry

if PY2:
    from io import BytesIO as StringIO
else:
    from io import StringIO

__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path', 'canonicalize_name',
           'renames', 'get_terminal_size', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'remove_tracebacks', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')


def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    try:
        return __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)


def ensure_dir(path):
    """os.path.makedirs without EEXIST."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def get_prog():
    try:
        if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
    except (AttributeError, TypeError, IndexError):
        pass
    return 'pip'


# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)


def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    # if file type currently read only
    if os.stat(path).st_mode & stat.S_IREAD:
        # convert to read/write
        os.chmod(path, stat.S_IWRITE)
        # use the original function to repeat the operation
        func(path)
        return
    else:
        raise


def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path


def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension


def ask_path_exists(message, options):
    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
        if action in options:
            return action
    return ask(message, options)


def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print(
                'Your response (%r) was not one of the expected responses: '
                '%s' % (response, ', '.join(options))
            )
        else:
            return response


def format_size(bytes):
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    elif bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    elif bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    else:
        return '%ibytes' % bytes


def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    if not os.path.isdir(path):
        return False
    setup_py = os.path.join(path, 'setup.py')
    if os.path.isfile(setup_py):
        return True
    return False


def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    return (re.search(r'<title>[^<]*Revision \d+:', html) and
            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))


def file_contents(filename):
    with open(filename, 'rb') as fp:
        return fp.read().decode('utf-8')


def read_chunks(file, size=4096):
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        yield chunk


def split_leading_dir(path):
    path = path.lstrip('/').lstrip('\\')
    if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
                        '\\' not in path):
        return path.split('/', 1)
    elif '\\' in path:
        return path.split('\\', 1)
    else:
        return path, ''


def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True


def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.

    """
    path = expanduser(path)
    if resolve_symlinks:
        path = os.path.realpath(path)
    else:
        path = os.path.abspath(path)
    return os.path.normcase(path)


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass


def is_local(path):
    """
    Return True if this is a path pip is allowed to modify.

    If we're in a virtualenv, sys.prefix points to the virtualenv's
    prefix; only sys.prefix is considered local.

    If we're not in a virtualenv, in general we can modify anything.
    However, if the OS vendor has configured distutils to install
    somewhere other than sys.prefix (which could be a subdirectory of
    sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
    and the domain of the OS vendor. (In other words, everything _other
    than_ sys.prefix is considered local.)

    """

    path = normalize_path(path)
    prefix = normalize_path(sys.prefix)

    if running_under_virtualenv():
        return path.startswith(normalize_path(sys.prefix))
    else:
        from pip.locations import distutils_scheme
        if path.startswith(prefix):
            for local_path in distutils_scheme("").values():
                if path.startswith(normalize_path(local_path)):
                    return True
            return False
        else:
            return True


def dist_is_local(dist):
    """
    Return True if given Distribution object is installed somewhere pip
    is allowed to modify.

    """
    return is_local(dist_location(dist))


def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    norm_path = normalize_path(dist_location(dist))
    return norm_path.startswith(normalize_path(user_site))


def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    """
    return normalize_path(
        dist_location(dist)
    ).startswith(normalize_path(site_packages))


def dist_is_editable(dist):
    """Is distribution an editable install?"""
    for path_item in sys.path:
        egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
        if os.path.isfile(egg_link):
            return True
    return False


def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True, only report editables.

    If ``user_only`` is True, only report installations in the user
    site directory.

    """
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(d):
            return True

    if include_editables:
        def editable_test(d):
            return True
    else:
        def editable_test(d):
            return not dist_is_editable(d)

    if editables_only:
        def editables_only_test(d):
            return dist_is_editable(d)
    else:
        def editables_only_test(d):
            return True

    if user_only:
        user_test = dist_in_usersite
    else:
        def user_test(d):
            return True

    return [d for d in pkg_resources.working_set
            if local_test(d) and
            d.key not in skip and
            editable_test(d) and
            editables_only_test(d) and
            user_test(d)
            ]


def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    """
    sites = []
    if running_under_virtualenv():
        if virtualenv_no_global():
            sites.append(site_packages)
        else:
            sites.append(site_packages)
            if user_site:
                sites.append(user_site)
    else:
        if user_site:
            sites.append(user_site)
        sites.append(site_packages)

    for site in sites:
        egglink = os.path.join(site, dist.project_name) + '.egg-link'
        if os.path.isfile(egglink):
            return egglink


def dist_location(dist):
    """
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.

    """
    egg_link = egg_link_path(dist)
    if egg_link:
        return egg_link
    return dist.location


def get_terminal_size():
    """Returns a tuple (x, y) representing the width (x) and the height (y)
    in characters of the terminal window."""
    def ioctl_GWINSZ(fd):
        try:
            import fcntl
            import termios
            import struct
            cr = struct.unpack(
                'hh',
                fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
            )
        except:
            return None
        if cr == (0, 0):
            return None
        return cr
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except:
            pass
    if not cr:
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    return int(cr[1]), int(cr[0])


def current_umask():
    """Get the current umask which involves having to set it temporarily."""
    mask = os.umask(0)
    os.umask(mask)
    return mask


def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()


def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # does the member have any execute permissions for
                # user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()


def unpack_file(filename, location, content_type, link):
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )


def remove_tracebacks(output):
    pattern = (r'(?:\W+File "(?:.*)", line (?:.*)\W+(?:.*)\W+\^\W+)?'
               r'Syntax(?:Error|Warning): (?:.*)')
    output = re.sub(pattern, '', output)
    if PY2:
        return output
    # compileall.compile_dir() prints different messages to stdout
    # in Python 3
    return re.sub(r"\*\*\* Error compiling (?:.*)", '', output)


def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_level=std_logging.DEBUG, command_desc=None,
                    extra_environ=None, spinner=None):
    if command_desc is None:
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.log(command_level, "Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=subprocess.PIPE,
            cwd=cwd, env=env)
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    while True:
        line = console_to_str(proc.stdout.readline())
        if not line:
            break
        line = line.rstrip()
        all_output.append(line + '\n')
        if show_stdout:
            logger.debug(line)
        if spinner is not None:
            spinner.spin()
    proc.wait()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if all_output:
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return remove_tracebacks(''.join(all_output))


def read_text_file(filename):
    """Return the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
    for enc in encodings:
        try:
            data = data.decode(enc)
        except UnicodeDecodeError:
            continue
        break

    assert type(data) != bytes  # Latin1 should have worked.
    return data


def _make_build_dir(build_dir):
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)


class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""
    def __init__(self, lines):
        self._gen = (l for l in lines)

    def readline(self):
        try:
            try:
                return next(self._gen)
            except NameError:
                return self._gen.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._gen


class StreamWrapper(StringIO):

    @classmethod
    def from_stream(cls, orig_stream):
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding


@contextlib.contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)


def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')


class cached_property(object):
    """A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # We're being accessed from the class itself, not from an object
            return self
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value


def get_installed_version(dist_name):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Create a requirement that we'll look for inside of setuptools.
    req = pkg_resources.Requirement.parse(dist_name)

    # We want to avoid having this cached, so we need to construct a new
    # working set each time.
    working_set = pkg_resources.WorkingSet()

    # Get the installed distribution from our working set
    dist = working_set.find(req)

    # Check to see if we got an installed distribution or not, if we did
    # we want to return its version.
    return dist.version if dist else None


def canonicalize_name(name):
    """Convert an arbitrary string to a canonical name used for comparison"""
    return pkg_resources.safe_name(name).lower()


def consume(iterator):
    """Consume an iterable at C speed."""
    deque(iterator, maxlen=0)
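
Several of the helpers above are pure functions and easy to exercise directly. A quick sketch, assuming this vendored pip 8.x tree is importable as `pip.utils` (the inputs are made-up examples):

# Assumes pip 8.x is on sys.path; all four helpers are defined above.
from pip.utils import (
    format_size, splitext, split_leading_dir, has_leading_dir,
)

print(format_size(1234567))                        # '1.2MB'
print(splitext('pkg-1.0.tar.gz'))                  # ('pkg-1.0', '.tar.gz')
print(split_leading_dir('pkg-1.0/setup.py'))       # ['pkg-1.0', 'setup.py']
print(has_leading_dir(['pkg/a.py', 'pkg/b.py']))   # True: one top-level dir
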
224
Shared/lib/python3.4/site-packages/pip/utils/appdirs.py
Normal file
@@ -0,0 +1,224 @@

"""
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
"""
from __future__ import absolute_import

import os
import sys

from pip.compat import WINDOWS, expanduser


def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go
    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`). Apps
    typically put cache data somewhere *under* the given dir here. Some
    examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    """
    if WINDOWS:
        # Get the base path
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))

        # Add our app name and Cache directory to it
        path = os.path.join(path, appname, "Cache")
    elif sys.platform == "darwin":
        # Get the base path
        path = expanduser("~/Library/Caches")

        # Add our app name to it
        path = os.path.join(path, appname)
    else:
        # Get the base path
        path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))

        # Add our app name to it
        path = os.path.join(path, appname)

    return path


def user_data_dir(appname, roaming=False):
    """
    Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
    elif sys.platform == "darwin":
        path = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
    else:
        path = os.path.join(
            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
            appname,
        )

    return path


def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default True) can be set False to not use the
            Windows roaming appdata directory. That means that for users on a
            Windows network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               same as user_data_dir
        Unix:                   ~/.config/<AppName>
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        path = user_data_dir(appname, roaming=roaming)
    elif sys.platform == "darwin":
        path = user_data_dir(appname)
    else:
        path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
        path = os.path.join(path, appname)

    return path


# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    """Return a list of potential user-shared config dirs for this application.

        "appname" is the name of application.

    Typical user config directories are:
        Mac OS X:   /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        pathlist = [os.path.join(path, appname)]
    elif sys.platform == 'darwin':
        pathlist = [os.path.join('/Library/Application Support', appname)]
    else:
        # try looking in $XDG_CONFIG_DIRS
        xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        if xdg_config_dirs:
            pathlist = [
                os.path.join(expanduser(x), appname)
                for x in xdg_config_dirs.split(os.pathsep)
            ]
        else:
            pathlist = []

        # always look in /etc directly as well
        pathlist.append('/etc')

    return pathlist


# -- Windows support functions --

def _get_win_folder_from_registry(csidl_name):
    """
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory


def _get_win_folder_with_ctypes(csidl_name):
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry
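
These helpers are what pip builds its own locations from (its HTTP/wheel cache, for instance, lives under `user_cache_dir('pip')`). A quick sketch, assuming the vendored module above is importable; the printed values are typical Linux results:

# Assumes the module above is importable as pip.utils.appdirs.
from pip.utils.appdirs import (
    user_cache_dir, user_config_dir, site_config_dirs,
)

print(user_cache_dir('pip'))     # e.g. /home/<user>/.cache/pip
print(user_config_dir('pip'))    # e.g. /home/<user>/.config/pip
print(site_config_dirs('pip'))   # e.g. ['/etc/xdg/pip', '/etc']
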
42
Shared/lib/python3.4/site-packages/pip/utils/build.py
Normal file
@@ -0,0 +1,42 @@

from __future__ import absolute_import

import os.path
import tempfile

from pip.utils import rmtree


class BuildDirectory(object):

    def __init__(self, name=None, delete=None):
        # If we were not given an explicit directory, and we were not given an
        # explicit delete option, then we'll default to deleting.
        if name is None and delete is None:
            delete = True

        if name is None:
            # We realpath here because some systems have their default tmpdir
            # symlinked to another directory. This tends to confuse build
            # scripts, so we canonicalize the path by traversing potential
            # symlinks here.
            name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
            # If we were not given an explicit directory, and we were not given
            # an explicit delete option, then we'll default to deleting.
            if delete is None:
                delete = True

        self.name = name
        self.delete = delete

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        return self.name

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def cleanup(self):
        if self.delete:
            rmtree(self.name)
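
`BuildDirectory` is an ordinary context manager: entering it yields the directory path, and exiting removes the temporary tree unless `delete=False` (or an explicit `name`) was given. A minimal usage sketch, assuming the vendored module is importable:

import os
from pip.utils.build import BuildDirectory  # the class defined above

with BuildDirectory() as build_dir:           # fresh pip-build-* tempdir
    open(os.path.join(build_dir, 'marker.txt'), 'w').close()
    print(os.path.exists(build_dir))          # True inside the block
print(os.path.exists(build_dir))              # False: cleaned up on exit
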
76
Shared/lib/python3.4/site-packages/pip/utils/deprecation.py
Normal file
@@ -0,0 +1,76 @@

"""
A module that implements tooling to enable easy warnings about deprecations.
"""
from __future__ import absolute_import

import logging
import warnings


class PipDeprecationWarning(Warning):
    pass


class RemovedInPip9Warning(PipDeprecationWarning, DeprecationWarning):
    pass


class RemovedInPip10Warning(PipDeprecationWarning, PendingDeprecationWarning):
    pass


class Python26DeprecationWarning(
    PipDeprecationWarning, PendingDeprecationWarning
):
    pass


DEPRECATIONS = [
    RemovedInPip9Warning, RemovedInPip10Warning, Python26DeprecationWarning
]


# Warnings <-> Logging Integration


_warnings_showwarning = None


def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
    else:
        if issubclass(category, PipDeprecationWarning):
            # We use a specially named logger which will handle all of the
            # deprecation messages for pip.
            logger = logging.getLogger("pip.deprecations")

            # This is purposely using the % formatter here instead of letting
            # the logging module handle the interpolation. This is because we
            # want it to appear as if someone typed this entire message out.
            log_message = "DEPRECATION: %s" % message

            # Things that are DeprecationWarnings will be removed in the very
            # next version of pip. We want these to be more obvious, so we
            # use the ERROR logging level, while the PendingDeprecationWarnings
            # still have at least 2 versions to go until they are removed and
            # so can just be warnings.
            if issubclass(category, DeprecationWarning):
                logger.error(log_message)
            else:
                logger.warning(log_message)
        else:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )


def install_warning_logger():
    global _warnings_showwarning

    if _warnings_showwarning is None:
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
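
Once `install_warning_logger()` has swapped `_showwarning` in, pip-flavored warnings are routed to the `pip.deprecations` logger instead of stderr, at ERROR level for `DeprecationWarning` subclasses. A sketch, assuming the vendored module is importable (the warning text is invented):

import logging
import warnings

from pip.utils.deprecation import (
    RemovedInPip9Warning, install_warning_logger,
)

logging.basicConfig()
warnings.simplefilter('always')  # DeprecationWarnings are hidden by default
install_warning_logger()

# Emitted via logger.error on 'pip.deprecations', since
# RemovedInPip9Warning is a DeprecationWarning subclass.
warnings.warn("old flag goes away in pip 9", RemovedInPip9Warning)
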
28
Shared/lib/python3.4/site-packages/pip/utils/filesystem.py
Normal file
@@ -0,0 +1,28 @@

import os
import os.path

from pip.compat import get_path_uid


def check_path_owner(path):
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            previous, path = path, os.path.dirname(path)
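
`check_path_owner` walks up from a possibly-not-yet-created path to its nearest existing ancestor and asks whether the current user may write there (with a uid check for root, so sudo-without-`-H` doesn't claim another user's cache). A sketch, assuming the vendored module is importable; the leaf path is hypothetical:

import os
import tempfile

from pip.utils.filesystem import check_path_owner  # defined above

# The leaf need not exist; ownership of the nearest existing ancestor
# (here, the temp dir) is what gets checked.
leaf = os.path.join(tempfile.gettempdir(), 'pip-demo', 'cache')
print(check_path_owner(leaf))  # True for a writable temp dir
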
92
Shared/lib/python3.4/site-packages/pip/utils/hashes.py
Normal file
92
Shared/lib/python3.4/site-packages/pip/utils/hashes.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
from __future__ import absolute_import

import hashlib

from pip.exceptions import HashMismatch, HashMissing, InstallationError
from pip.utils import read_chunks
from pip._vendor.six import iteritems, iterkeys, itervalues


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``.
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ['sha256', 'sha384', 'sha512']


class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = {} if hashes is None else hashes

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in iterkeys(self._allowed):
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        for chunk in chunks:
            for hash in itervalues(gots):
                hash.update(chunk)

        for hash_name, got in iteritems(gots):
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        return self.__nonzero__()


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always be raised.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
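
A short sketch of how the class above is meant to be driven; the payload and digest are computed on the spot rather than taken from any real package:

import hashlib

from pip.exceptions import HashMismatch
from pip.utils.hashes import Hashes

payload = b"example archive bytes"
good_digest = hashlib.sha256(payload).hexdigest()

# Allowed digests, keyed by algorithm name, exactly as __init__ expects.
hashes = Hashes({"sha256": [good_digest]})

hashes.check_against_chunks([payload])        # matches: returns quietly
try:
    hashes.check_against_chunks([b"tampered"])
except HashMismatch as exc:
    print("rejected:", exc)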
130
Shared/lib/python3.4/site-packages/pip/utils/logging.py
Normal file
@@ -0,0 +1,130 @@
from __future__ import absolute_import

import contextlib
import logging
import logging.handlers
import os

try:
    import threading
except ImportError:
    import dummy_threading as threading

from pip.compat import WINDOWS
from pip.utils import ensure_dir

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


_log_state = threading.local()
_log_state.indentation = 0


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation():
    return getattr(_log_state, 'indentation', 0)


class IndentingFormatter(logging.Formatter):

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        formatted = logging.Formatter.format(self, record)
        formatted = "".join([
            (" " * get_indentation()) + line
            for line in formatted.splitlines(True)
        ])
        return formatted


def _color_wrap(*colors):
    def wrapped(inp):
        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
    return wrapped


class ColorizedStreamHandler(logging.StreamHandler):

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None):
        logging.StreamHandler.__init__(self, stream)

        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama
        if not colorama:
            return False

        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # Anything else we should not color
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):

    def _open(self):
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)


class MaxLevelFilter(logging.Filter):

    def __init__(self, level):
        self.level = level

    def filter(self, record):
        return record.levelno < self.level
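
A sketch of indent_log and IndentingFormatter working together (a bare handler stands in for pip's real logging setup, which layers color and file handlers on top):

import logging

from pip.utils.logging import IndentingFormatter, indent_log

handler = logging.StreamHandler()
handler.setFormatter(IndentingFormatter("%(message)s"))
logger = logging.getLogger("example")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("Collecting example-package")
with indent_log():
    # Everything logged inside the context manager is shifted two spaces.
    logger.info("Downloading example-package-1.0.tar.gz")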
162
Shared/lib/python3.4/site-packages/pip/utils/outdated.py
Normal file
@@ -0,0 +1,162 @@
from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version

from pip.compat import total_seconds, WINDOWS
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, running_under_virtualenv
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner


SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class VirtualenvSelfCheckState(object):
    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        # Attempt to write out our version check file
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                {
                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                    "pypi_version": pypi_version,
                },
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )


class GlobalSelfCheckState(object):
    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            self.state = {}

    def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def load_selfcheck_statefile():
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    else:
        return GlobalSelfCheckState()


def pip_version_check(session):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if installed_version is None:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            resp = session.get(
                PyPI.pip_json_url,
                headers={"Accept": "application/json"},
            )
            resp.raise_for_status()
            pypi_version = [
                v for v in sorted(
                    list(resp.json()["releases"]),
                    key=packaging_version.parse,
                )
                if not packaging_version.parse(v).is_prerelease
            ][-1]

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )

    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
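
For reference, this is the shape of the state that GlobalSelfCheckState.save() writes, keyed by sys.prefix so several environments can share one file under USER_CACHE_DIR; the values shown are illustrative only:

# Sketch of the on-disk selfcheck.json contents (illustrative values).
example_state = {
    "/usr/local": {
        "last_check": "2016-01-30T12:00:00Z",   # SELFCHECK_DATE_FMT
        "pypi_version": "8.0.2",
    },
}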
@@ -0,0 +1,6 @@
# Shim to wrap setup.py invocation with setuptools
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
    ".replace('\\r\\n', '\\n'), __file__, 'exec'))"
)
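
A sketch of how the constant defined just above gets used: the %r placeholder receives the path to the target setup.py, and the resulting one-liner is handed to the interpreter with -c (the path here is a made-up example):

setup_py = "/tmp/example-package/setup.py"   # hypothetical path
args = ["python", "-c", SETUPTOOLS_SHIM % setup_py, "install"]
print(args[2])   # the generated -c payload that re-executes setup.py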
339
Shared/lib/python3.4/site-packages/pip/utils/ui.py
Normal file
@@ -0,0 +1,339 @@
from __future__ import absolute_import
from __future__ import division

import itertools
import sys
from signal import signal, SIGINT, default_int_handler
import time
import contextlib
import logging

from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import (WritelnMixin,
                                          HIDE_CURSOR, SHOW_CURSOR)
from pip._vendor.progress.spinner import Spinner

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None

logger = logging.getLogger(__name__)


def _select_progress_class(preferred, fallback):
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file. If this works, then we'll assume that we can use the
    # fancier bar; if not, we'll fall back to the plaintext bar.
    try:
        six.text_type().join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


_BaseBar = _select_progress_class(IncrementalBar, Bar)


class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed
        # from Python, and we cannot restore it. This probably should not
        # happen, but if it does, we must restore something sensible instead,
        # at least. The least bad option should be Python's default SIGINT
        # handler, which just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class DownloadProgressMixin(object):

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        if self.eta:
            return "eta %s" % self.eta_td
        return ""

    def iter(self, it, n=1):
        for x in it:
            yield x
            self.next(n)
        self.finish()


class WindowsMixin(object):

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI
        # codes, even with colorama. So we'll ensure that hide_cursor is
        # False on Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module;
        # if we do, then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so
            # we'll add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so
            # we'll add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
                          DownloadProgressMixin, _BaseBar):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################

@contextlib.contextmanager
def hidden_cursor(file):
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        self._last_update = time.time()


class InteractiveSpinner(object):
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    # The interactive spinner goes directly to sys.stdout rather than being
    # routed through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # The non-interactive spinner goes through the logging system, so it is
    # always in sync with the logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
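
A sketch of driving open_spinner from the caller's side (the message and the sleep loop are stand-ins for real work). On a tty this renders an animated spinner; without one it logs periodic "still running..." keep-alive lines instead:

import time

from pip.utils.ui import open_spinner

with open_spinner("Running setup.py bdist_wheel for example") as spinner:
    for _ in range(3):
        time.sleep(0.2)   # stand-in for real work
        spinner.spin()    # rate-limited; safe to call in a tight loop
# The context manager calls finish("done"/"error"/"canceled") on exit.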
363
Shared/lib/python3.4/site-packages/pip/vcs/__init__.py
Normal file
@@ -0,0 +1,363 @@
"""Handles all VCS (version control) support"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||
|
||||
from pip.exceptions import BadCommand
|
||||
from pip.utils import (display_path, backup_dir, call_subprocess,
|
||||
rmtree, ask_path_exists)
|
||||
|
||||
|
||||
__all__ = ['vcs', 'get_src_requirement']
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VcsSupport(object):
|
||||
_registry = {}
|
||||
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
|
||||
|
||||
def __init__(self):
|
||||
# Register more schemes with urlparse for various version control
|
||||
# systems
|
||||
urllib_parse.uses_netloc.extend(self.schemes)
|
||||
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
|
||||
if getattr(urllib_parse, 'uses_fragment', None):
|
||||
urllib_parse.uses_fragment.extend(self.schemes)
|
||||
super(VcsSupport, self).__init__()
|
||||
|
||||
def __iter__(self):
|
||||
return self._registry.__iter__()
|
||||
|
||||
@property
|
||||
def backends(self):
|
||||
return list(self._registry.values())
|
||||
|
||||
@property
|
||||
def dirnames(self):
|
||||
return [backend.dirname for backend in self.backends]
|
||||
|
||||
@property
|
||||
def all_schemes(self):
|
||||
schemes = []
|
||||
for backend in self.backends:
|
||||
schemes.extend(backend.schemes)
|
||||
return schemes
|
||||
|
||||
def register(self, cls):
|
||||
if not hasattr(cls, 'name'):
|
||||
logger.warning('Cannot register VCS %s', cls.__name__)
|
||||
return
|
||||
if cls.name not in self._registry:
|
||||
self._registry[cls.name] = cls
|
||||
logger.debug('Registered VCS backend: %s', cls.name)
|
||||
|
||||
def unregister(self, cls=None, name=None):
|
||||
if name in self._registry:
|
||||
del self._registry[name]
|
||||
elif cls in self._registry.values():
|
||||
del self._registry[cls.name]
|
||||
else:
|
||||
logger.warning('Cannot unregister because no class or name given')
|
||||
|
||||
def get_backend_name(self, location):
|
||||
"""
|
||||
Return the name of the version control backend if found at given
|
||||
location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
|
||||
"""
|
||||
for vc_type in self._registry.values():
|
||||
if vc_type.controls_location(location):
|
||||
logger.debug('Determine that %s uses VCS: %s',
|
||||
location, vc_type.name)
|
||||
return vc_type.name
|
||||
return None
|
||||
|
||||
def get_backend(self, name):
|
||||
name = name.lower()
|
||||
if name in self._registry:
|
||||
return self._registry[name]
|
||||
|
||||
def get_backend_from_location(self, location):
|
||||
vc_type = self.get_backend_name(location)
|
||||
if vc_type:
|
||||
return self.get_backend(vc_type)
|
||||
return None
|
||||
|
||||
|
||||
vcs = VcsSupport()
|
||||
|
||||
|
||||
class VersionControl(object):
|
||||
name = ''
|
||||
dirname = ''
|
||||
# List of supported schemes for this Version Control
|
||||
schemes = ()
|
||||
|
||||
def __init__(self, url=None, *args, **kwargs):
|
||||
self.url = url
|
||||
super(VersionControl, self).__init__(*args, **kwargs)
|
||||
|
||||
def _is_local_repository(self, repo):
|
||||
"""
|
||||
posix absolute paths start with os.path.sep,
|
||||
win32 ones ones start with drive (like c:\\folder)
|
||||
"""
|
||||
drive, tail = os.path.splitdrive(repo)
|
||||
return repo.startswith(os.path.sep) or drive
|
||||
|
||||
# See issue #1083 for why this method was introduced:
|
||||
# https://github.com/pypa/pip/issues/1083
|
||||
def translate_egg_surname(self, surname):
|
||||
# For example, Django has branches of the form "stable/1.7.x".
|
||||
return surname.replace('/', '_')
|
||||
|
||||
def export(self, location):
|
||||
"""
|
||||
Export the repository at the url to the destination location
|
||||
i.e. only download the files, without vcs informations
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_url_rev(self):
|
||||
"""
|
||||
Returns the correct repository URL and revision by parsing the given
|
||||
repository URL
|
||||
"""
|
||||
error_message = (
|
||||
"Sorry, '%s' is a malformed VCS url. "
|
||||
"The format is <vcs>+<protocol>://<url>, "
|
||||
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
|
||||
)
|
||||
assert '+' in self.url, error_message % self.url
|
||||
url = self.url.split('+', 1)[1]
|
||||
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
|
||||
rev = None
|
||||
if '@' in path:
|
||||
path, rev = path.rsplit('@', 1)
|
||||
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
|
||||
return url, rev
|
||||
|
||||
def get_info(self, location):
|
||||
"""
|
||||
Returns (url, revision), where both are strings
|
||||
"""
|
||||
assert not location.rstrip('/').endswith(self.dirname), \
|
||||
'Bad directory: %s' % location
|
||||
return self.get_url(location), self.get_revision(location)
|
||||
|
||||
def normalize_url(self, url):
|
||||
"""
|
||||
Normalize a URL for comparison by unquoting it and removing any
|
||||
trailing slash.
|
||||
"""
|
||||
return urllib_parse.unquote(url).rstrip('/')
|
||||
|
||||
def compare_urls(self, url1, url2):
|
||||
"""
|
||||
Compare two repo URLs for identity, ignoring incidental differences.
|
||||
"""
|
||||
return (self.normalize_url(url1) == self.normalize_url(url2))
|
||||
|
||||
def obtain(self, dest):
|
||||
"""
|
||||
Called when installing or updating an editable package, takes the
|
||||
source path of the checkout.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
"""
|
||||
Switch the repo at ``dest`` to point to ``URL``.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, dest, rev_options):
|
||||
"""
|
||||
Update an already-existing repo to the given ``rev_options``.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def check_version(self, dest, rev_options):
|
||||
"""
|
||||
Return True if the version is identical to what exists and
|
||||
doesn't need to be updated.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def check_destination(self, dest, url, rev_options, rev_display):
|
||||
"""
|
||||
Prepare a location to receive a checkout/clone.
|
||||
|
||||
Return True if the location is ready for (and requires) a
|
||||
checkout/clone, False otherwise.
|
||||
"""
|
||||
checkout = True
|
||||
prompt = False
|
||||
if os.path.exists(dest):
|
||||
checkout = False
|
||||
if os.path.exists(os.path.join(dest, self.dirname)):
|
||||
existing_url = self.get_url(dest)
|
||||
if self.compare_urls(existing_url, url):
|
||||
logger.debug(
|
||||
'%s in %s exists, and has correct URL (%s)',
|
||||
self.repo_name.title(),
|
||||
display_path(dest),
|
||||
url,
|
||||
)
|
||||
if not self.check_version(dest, rev_options):
|
||||
logger.info(
|
||||
'Updating %s %s%s',
|
||||
display_path(dest),
|
||||
self.repo_name,
|
||||
rev_display,
|
||||
)
|
||||
self.update(dest, rev_options)
|
||||
else:
|
||||
logger.info(
|
||||
'Skipping because already up-to-date.')
|
||||
else:
|
||||
logger.warning(
|
||||
'%s %s in %s exists with URL %s',
|
||||
self.name,
|
||||
self.repo_name,
|
||||
display_path(dest),
|
||||
existing_url,
|
||||
)
|
||||
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
|
||||
('s', 'i', 'w', 'b'))
|
||||
else:
|
||||
logger.warning(
|
||||
'Directory %s already exists, and is not a %s %s.',
|
||||
dest,
|
||||
self.name,
|
||||
self.repo_name,
|
||||
)
|
||||
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
|
||||
if prompt:
|
||||
logger.warning(
|
||||
'The plan is to install the %s repository %s',
|
||||
self.name,
|
||||
url,
|
||||
)
|
||||
response = ask_path_exists('What to do? %s' % prompt[0],
|
||||
prompt[1])
|
||||
|
||||
if response == 's':
|
||||
logger.info(
|
||||
'Switching %s %s to %s%s',
|
||||
self.repo_name,
|
||||
display_path(dest),
|
||||
url,
|
||||
rev_display,
|
||||
)
|
||||
self.switch(dest, url, rev_options)
|
||||
elif response == 'i':
|
||||
# do nothing
|
||||
pass
|
||||
elif response == 'w':
|
||||
logger.warning('Deleting %s', display_path(dest))
|
||||
rmtree(dest)
|
||||
checkout = True
|
||||
elif response == 'b':
|
||||
dest_dir = backup_dir(dest)
|
||||
logger.warning(
|
||||
'Backing up %s to %s', display_path(dest), dest_dir,
|
||||
)
|
||||
shutil.move(dest, dest_dir)
|
||||
checkout = True
|
||||
return checkout
|
||||
|
||||
def unpack(self, location):
|
||||
"""
|
||||
Clean up current location and download the url repository
|
||||
(and vcs infos) into location
|
||||
"""
|
||||
if os.path.exists(location):
|
||||
rmtree(location)
|
||||
self.obtain(location)
|
||||
|
||||
def get_src_requirement(self, dist, location):
|
||||
"""
|
||||
Return a string representing the requirement needed to
|
||||
redownload the files currently present in location, something
|
||||
like:
|
||||
{repository_url}@{revision}#egg={project_name}-{version_identifier}
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_url(self, location):
|
||||
"""
|
||||
Return the url used at location
|
||||
Used in get_info or check_destination
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_revision(self, location):
|
||||
"""
|
||||
Return the current revision of the files at location
|
||||
Used in get_info
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def run_command(self, cmd, show_stdout=True, cwd=None,
|
||||
on_returncode='raise',
|
||||
command_level=logging.DEBUG, command_desc=None,
|
||||
extra_environ=None, spinner=None):
|
||||
"""
|
||||
Run a VCS subcommand
|
||||
This is simply a wrapper around call_subprocess that adds the VCS
|
||||
command name, and checks that the VCS is available
|
||||
"""
|
||||
cmd = [self.name] + cmd
|
||||
try:
|
||||
return call_subprocess(cmd, show_stdout, cwd,
|
||||
on_returncode, command_level,
|
||||
command_desc, extra_environ,
|
||||
spinner)
|
||||
except OSError as e:
|
||||
# errno.ENOENT = no such file or directory
|
||||
# In other words, the VCS executable isn't available
|
||||
if e.errno == errno.ENOENT:
|
||||
raise BadCommand('Cannot find command %r' % self.name)
|
||||
else:
|
||||
raise # re-raise exception if a different error occurred
|
||||
|
||||
@classmethod
|
||||
def controls_location(cls, location):
|
||||
"""
|
||||
Check if a location is controlled by the vcs.
|
||||
It is meant to be overridden to implement smarter detection
|
||||
mechanisms for specific vcs.
|
||||
"""
|
||||
logger.debug('Checking in %s for %s (%s)...',
|
||||
location, cls.dirname, cls.name)
|
||||
path = os.path.join(location, cls.dirname)
|
||||
return os.path.exists(path)
|
||||
|
||||
|
||||
def get_src_requirement(dist, location):
|
||||
version_control = vcs.get_backend_from_location(location)
|
||||
if version_control:
|
||||
try:
|
||||
return version_control().get_src_requirement(dist,
|
||||
location)
|
||||
except BadCommand:
|
||||
logger.warning(
|
||||
'cannot determine version of editable source in %s '
|
||||
'(%s command not found in path)',
|
||||
location,
|
||||
version_control.name,
|
||||
)
|
||||
return dist.as_requirement()
|
||||
logger.warning(
|
||||
'cannot determine version of editable source in %s (is not SVN '
|
||||
'checkout, Git clone, Mercurial clone or Bazaar branch)',
|
||||
location,
|
||||
)
|
||||
return dist.as_requirement()
|
||||
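
A sketch of the registry and URL parsing above in action. Importing a backend module registers its class with the global vcs instance (pip's own __init__ already does this for all four backends); the URL is an arbitrary example:

from pip.vcs import vcs
import pip.vcs.git  # importing the module runs vcs.register(Git)

backend_cls = vcs.get_backend("git")
repo = backend_cls("git+https://github.com/pypa/pip.git@8.0.2#egg=pip")
url, rev = repo.get_url_rev()
print(url)   # https://github.com/pypa/pip.git
print(rev)   # 8.0.2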
116
Shared/lib/python3.4/site-packages/pip/vcs/bazaar.py
Normal file
@@ -0,0 +1,116 @@
from __future__ import absolute_import

import logging
import os
import tempfile

# TODO: Get this into six.moves.urllib.parse
try:
    from urllib import parse as urllib_parse
except ImportError:
    import urlparse as urllib_parse

from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url


logger = logging.getLogger(__name__)


class Bazaar(VersionControl):
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])
            urllib_parse.non_hierarchical.extend(['lp'])

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            rmtree(location)
        try:
            self.run_command(['export', location], cwd=temp_dir,
                             show_stdout=False)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix: the URL scheme loses bzr+ from bzr+ssh://; re-add it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
274
Shared/lib/python3.4/site-packages/pip/vcs/git.py
Normal file
@@ -0,0 +1,274 @@
from __future__ import absolute_import

import logging
import tempfile
import os.path

from pip.compat import samefile
from pip.exceptions import BadCommand
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl


urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit


logger = logging.getLogger(__name__)


class Git(VersionControl):
    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )

    def __init__(self, url=None, *args, **kwargs):

        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                url = urlunsplit((scheme, netloc, newpath, query, fragment))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )

        super(Git, self).__init__(url, *args, **kwargs)

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            if not location.endswith('/'):
                location = location + '/'
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def check_rev_options(self, rev, dest, rev_options):
        """Check the revision options before checkout to compensate for the
        fact that tags and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        """
        revisions = self.get_short_refs(dest)

        origin_rev = 'origin/%s' % rev
        if origin_rev in revisions:
            # remote branch
            return [revisions[origin_rev]]
        elif rev in revisions:
            # a local tag or branch name
            return [revisions[rev]]
        else:
            logger.warning(
                "Could not find a tag or branch '%s', assuming commit.", rev,
            )
            return rev_options

    def check_version(self, dest, rev_options):
        """
        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        """
        return self.get_revision(dest).startswith(rev_options[0])

    def switch(self, dest, url, rev_options):
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        self.run_command(['checkout', '-q'] + rev_options, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, rev_options):
        # First fetch changes from the default remote
        self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        if rev_options:
            rev_options = self.check_rev_options(
                rev_options[0], dest, rev_options,
            )
        self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.check_version(dest, rev_options):
                    self.run_command(
                        ['checkout', '-q'] + rev_options,
                        cwd=dest,
                    )
            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        url = self.run_command(
            ['config', 'remote.origin.url'],
            show_stdout=False, cwd=location)
        return url.strip()

    def get_revision(self, location):
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location)
        return current_rev.strip()

    def get_full_refs(self, location):
        """Yields tuples of (commit, ref) for branches and tags"""
        output = self.run_command(['show-ref'],
                                  show_stdout=False, cwd=location)
        for line in output.strip().splitlines():
            commit, ref = line.split(' ', 1)
            yield commit.strip(), ref.strip()

    def is_ref_remote(self, ref):
        return ref.startswith('refs/remotes/')

    def is_ref_branch(self, ref):
        return ref.startswith('refs/heads/')

    def is_ref_tag(self, ref):
        return ref.startswith('refs/tags/')

    def is_ref_commit(self, ref):
        """A ref is a commit sha if it is not anything else"""
        return not any((
            self.is_ref_remote(ref),
            self.is_ref_branch(ref),
            self.is_ref_tag(ref),
        ))

    # Should deprecate `get_refs` since it's ambiguous
    def get_refs(self, location):
        return self.get_short_refs(location)

    def get_short_refs(self, location):
        """Return map of named refs (branches or tags) to commit hashes."""
        rv = {}
        for commit, ref in self.get_full_refs(location):
            ref_name = None
            if self.is_ref_remote(ref):
                ref_name = ref[len('refs/remotes/'):]
            elif self.is_ref_branch(ref):
                ref_name = ref[len('refs/heads/'):]
            elif self.is_ref_tag(ref):
                ref_name = ref[len('refs/tags/'):]
            if ref_name is not None:
                rv[ref_name] = commit
        return rv

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        if not repo:
            return None
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with an ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()

        return url, rev

    def update_submodules(self, location):
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        if super(Git, cls).controls_location(location):
            return True
        try:
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
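
A small sketch of the ref-classification helpers above; all inputs are illustrative (the sha is not a real commit):

from pip.vcs.git import Git

g = Git()
print(g.is_ref_branch("refs/heads/master"))           # True
print(g.is_ref_tag("refs/tags/8.0.2"))                # True
print(g.is_ref_remote("refs/remotes/origin/master"))  # True
# Anything that is none of the above is treated as a commit sha:
print(g.is_ref_commit("7921be1537eac1e97bc40179a57f0349c2aee67d"))  # True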
103
Shared/lib/python3.4/site-packages/pip/vcs/mercurial.py
Normal file
@@ -0,0 +1,103 @@
from __future__ import absolute_import

import logging
import os
import tempfile

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
from pip._vendor.six.moves import configparser


logger = logging.getLogger(__name__)


class Mercurial(VersionControl):
    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'], cwd=dest)
        self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_revision(self, location):
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        if not repo:
            return None
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False


vcs.register(Mercurial)
249
Shared/lib/python3.4/site-packages/pip/vcs/subversion.py
Normal file
@@ -0,0 +1,249 @@
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||
|
||||
from pip.index import Link
|
||||
from pip.utils import rmtree, display_path
|
||||
from pip.utils.logging import indent_log
|
||||
from pip.vcs import vcs, VersionControl
|
||||
|
||||
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
||||
_svn_rev_re = re.compile('committed-rev="(\d+)"')
|
||||
_svn_url_re = re.compile(r'URL: (.+)')
|
||||
_svn_revision_re = re.compile(r'Revision: (.+)')
|
||||
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
||||
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Subversion(VersionControl):
|
||||
name = 'svn'
|
||||
dirname = '.svn'
|
||||
repo_name = 'checkout'
|
||||
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
|
||||
|
||||
def get_info(self, location):
|
||||
"""Returns (url, revision), where both are strings"""
|
||||
assert not location.rstrip('/').endswith(self.dirname), \
|
||||
'Bad directory: %s' % location
|
||||
output = self.run_command(
|
||||
['info', location],
|
||||
show_stdout=False,
|
||||
extra_environ={'LANG': 'C'},
|
||||
)
|
||||
match = _svn_url_re.search(output)
|
||||
if not match:
|
||||
logger.warning(
|
||||
'Cannot determine URL of svn checkout %s',
|
||||
display_path(location),
|
||||
)
|
||||
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||
return None, None
|
||||
url = match.group(1).strip()
|
||||
match = _svn_revision_re.search(output)
|
||||
if not match:
|
||||
logger.warning(
|
||||
'Cannot determine revision of svn checkout %s',
|
||||
display_path(location),
|
||||
)
|
||||
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||
return url, None
|
||||
return url, match.group(1)
|
||||
|
||||
def export(self, location):
|
||||
"""Export the svn repository at the url to the destination location"""
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = get_rev_options(url, rev)
|
||||
logger.info('Exporting svn repository %s to %s', url, location)
|
||||
with indent_log():
|
||||
if os.path.exists(location):
|
||||
# Subversion doesn't like to check out over an existing
|
||||
# directory --force fixes this, but was only added in svn 1.5
|
||||
rmtree(location)
|
||||
self.run_command(
|
||||
['export'] + rev_options + [url, location],
|
||||
show_stdout=False)
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
self.run_command(['switch'] + rev_options + [url, dest])
|
||||
|
||||
def update(self, dest, rev_options):
|
||||
self.run_command(['update'] + rev_options + [dest])
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = get_rev_options(url, rev)
|
||||
if rev:
|
||||
rev_display = ' (to revision %s)' % rev
|
||||
else:
|
||||
rev_display = ''
|
||||
if self.check_destination(dest, url, rev_options, rev_display):
|
||||
logger.info(
|
||||
'Checking out %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
self.run_command(['checkout', '-q'] + rev_options + [url, dest])
|
||||
|
||||
def get_location(self, dist, dependency_links):
|
||||
for url in dependency_links:
|
||||
egg_fragment = Link(url).egg_fragment
|
||||
if not egg_fragment:
|
||||
continue
|
||||
if '-' in egg_fragment:
|
||||
# FIXME: will this work when a package has - in the name?
|
||||
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
|
||||
else:
|
||||
key = egg_fragment
|
||||
if key == dist.key:
|
||||
return url.split('#', 1)[0]
|
||||
return None
|
||||
|
||||
def get_revision(self, location):
|
||||
"""
|
||||
Return the maximum revision for all files under a given location
|
||||
"""
|
||||
# Note: taken from setuptools.command.egg_info
|
||||
revision = 0
|
||||
|
||||
for base, dirs, files in os.walk(location):
|
||||
if self.dirname not in dirs:
|
||||
dirs[:] = []
|
||||
continue # no sense walking uncontrolled subdirs
|
||||
dirs.remove(self.dirname)
|
||||
entries_fn = os.path.join(base, self.dirname, 'entries')
|
||||
if not os.path.exists(entries_fn):
|
||||
# FIXME: should we warn?
|
||||
continue
|
||||
|
||||
dirurl, localrev = self._get_svn_url_rev(base)
|
||||
|
||||
if base == location:
|
||||
base_url = dirurl + '/' # save the root url
|
||||
elif not dirurl or not dirurl.startswith(base_url):
|
||||
dirs[:] = []
|
||||
continue # not part of the same svn tree, skip it
|
||||
revision = max(revision, localrev)
|
||||
return revision
|
||||
|
||||
def get_url_rev(self):
|
||||
# hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
|
||||
url, rev = super(Subversion, self).get_url_rev()
|
||||
if url.startswith('ssh://'):
|
||||
url = 'svn+' + url
|
||||
return url, rev
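
    # Example (illustrative sketch, not from the original module): for a
    # requirement URL like "svn+ssh://user@host/repo", the generic parsing
    # in the base class strips the "svn+" prefix and yields
    # "ssh://user@host/repo"; the check above restores it to
    # "svn+ssh://user@host/repo" before it reaches the svn command line.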

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py, we have to look upward from the location until we find
        # a real setup.py.
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        from pip.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False


def get_rev_options(url, rev):
    if rev:
        rev_options = ['-r', rev]
    else:
        rev_options = []

    r = urllib_parse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
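
# Example (illustrative sketch, not from the original module): for a checkout
# URL that carries credentials and a pinned revision,
#   get_rev_options('http://user:secret@svn.example.com/repo', '1234')
# returns ['-r', '1234', '--username', 'user', '--password', 'secret'],
# ready to be spliced into an `svn checkout` or `svn export` invocation.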


vcs.register(Subversion)
854
Shared/lib/python3.4/site-packages/pip/wheel.py
Normal file
@@ -0,0 +1,854 @@
"""
|
||||
Support for installing and building the "wheel" binary package format.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import compileall
|
||||
import csv
|
||||
import errno
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import tempfile
|
||||
import warnings
|
||||
|
||||
from base64 import urlsafe_b64encode
|
||||
from email.parser import Parser
|
||||
|
||||
from pip._vendor.six import StringIO
|
||||
|
||||
import pip
|
||||
from pip.compat import expanduser
|
||||
from pip.download import path_to_url, unpack_url
|
||||
from pip.exceptions import (
|
||||
InstallationError, InvalidWheelFilename, UnsupportedWheel)
|
||||
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
|
||||
from pip import pep425tags
|
||||
from pip.utils import (
|
||||
call_subprocess, ensure_dir, captured_stdout, rmtree, canonicalize_name,
|
||||
read_chunks)
|
||||
from pip.utils.ui import open_spinner
|
||||
from pip.utils.logging import indent_log
|
||||
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
|
||||
from pip._vendor.distlib.scripts import ScriptMaker
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.six.moves import configparser
|
||||
|
||||
|
||||
wheel_ext = '.whl'
|
||||
|
||||
VERSION_COMPATIBLE = (1, 0)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WheelCache(object):
|
||||
"""A cache of wheels for future installs."""
|
||||
|
||||
def __init__(self, cache_dir, format_control):
|
||||
"""Create a wheel cache.
|
||||
|
||||
:param cache_dir: The root of the cache.
|
||||
:param format_control: A pip.index.FormatControl object to limit
|
||||
binaries being read from the cache.
|
||||
"""
|
||||
self._cache_dir = expanduser(cache_dir) if cache_dir else None
|
||||
self._format_control = format_control
|
||||
|
||||
def cached_wheel(self, link, package_name):
|
||||
return cached_wheel(
|
||||
self._cache_dir, link, self._format_control, package_name)
|
||||
|
||||
|
||||
def _cache_for_link(cache_dir, link):
|
||||
"""
|
||||
Return a directory to store cached wheels in for link.
|
||||
|
||||
Because there are M wheels for any one sdist, we provide a directory
|
||||
to cache them in, and then consult that directory when looking up
|
||||
cache hits.
|
||||
|
||||
We only insert things into the cache if they have plausible version
|
||||
numbers, so that we don't contaminate the cache with things that were not
|
||||
unique. E.g. ./package might have dozens of installs done for it and build
|
||||
a version of 0.0...and if we built and cached a wheel, we'd end up using
|
||||
the same wheel even if the source has been edited.
|
||||
|
||||
:param cache_dir: The cache_dir being used by pip.
|
||||
:param link: The link of the sdist for which this will cache wheels.
|
||||
"""
|
||||
|
||||
# We want to generate an url to use as our cache key, we don't want to just
|
||||
# re-use the URL because it might have other items in the fragment and we
|
||||
# don't care about those.
|
||||
key_parts = [link.url_without_fragment]
|
||||
if link.hash_name is not None and link.hash is not None:
|
||||
key_parts.append("=".join([link.hash_name, link.hash]))
|
||||
key_url = "#".join(key_parts)
|
||||
|
||||
# Encode our key url with sha224, we'll use this because it has similar
|
||||
# security properties to sha256, but with a shorter total output (and thus
|
||||
# less secure). However the differences don't make a lot of difference for
|
||||
# our use case here.
|
||||
hashed = hashlib.sha224(key_url.encode()).hexdigest()
|
||||
|
||||
# We want to nest the directories some to prevent having a ton of top level
|
||||
# directories where we might run out of sub directories on some FS.
|
||||
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
|
||||
|
||||
# Inside of the base location for cached wheels, expand our parts and join
|
||||
# them all together.
|
||||
return os.path.join(cache_dir, "wheels", *parts)
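
# Example (illustrative sketch, not from the original module): for a link
# such as https://example.com/foo-1.0.tar.gz#sha256=<digest>, the cache key
# is the URL without its fragment plus "sha256=<digest>"; its sha224 hex
# digest (56 characters, say starting "a1b2c3...") maps to
#   <cache_dir>/wheels/a1/b2/c3/<remaining 50 hex characters>
# so wheels built for the same sdist always land in the same directory.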


def cached_wheel(cache_dir, link, format_control, package_name):
    if not cache_dir:
        return link
    if not link:
        return link
    if link.is_wheel:
        return link
    if not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = canonicalize_name(package_name)
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        wheel_names = os.listdir(root)
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for wheel_name in wheel_names:
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), wheel_name))
    if not candidates:
        return link
    candidates.sort()
    path = os.path.join(root, candidates[0][1])
    return pip.index.Link(path_to_url(path))
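
# Example (illustrative sketch, not from the original module): the candidates
# are (support_index_min, wheel_name) tuples, so candidates.sort() puts the
# wheel whose tags rank earliest in the supported-tags list first. In
# practice a platform-specific build is preferred over a generic
# py2.py3-none-any one when both are cached for the same sdist.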


def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo)"""
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
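
# Example (illustrative sketch, not from the original module): the digest is
# formatted the way RECORD expects it, i.e. "sha256=" followed by the
# urlsafe-base64 digest with "=" padding stripped, so a RECORD row might read
#   foo/__init__.py,sha256=<43-char urlsafe-b64 digest>,1024
# (path, hash, size in bytes). Note the prefix is fixed to "sha256=" even
# though `algo` is a parameter, so callers should stick to the default.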


def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)


def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True
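
# Example (illustrative sketch, not from the original module): a script
# shipped in a wheel's .data/scripts directory starts with the placeholder
# shebang
#   #!python
# and after fix_script() it points at the installing interpreter, e.g.
#   #!/usr/local/bin/python3.4
# Any script whose first line is not "#!python" is left untouched.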

dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False


def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers,
    # which means they may or may not be valid INI files. The attempt here
    # is to strip leading and trailing whitespace in order to make them
    # valid INI files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = configparser.RawConfigParser()
    cp.optionxform = lambda option: option
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui
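
# Example (illustrative sketch, not from the original module): for an
# entry_points.txt containing
#   [console_scripts]
#   pip = pip:main
#   [gui_scripts]
#   foo-gui = foo:gui_main
# get_entrypoints() returns
#   ({'pip': 'pip:main'}, {'foo-gui': 'foo:gui_main'}).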


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False,
                     prefix=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering
                # above to ensure we don't install empty dirs; empty dirs
                # can't be uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file; currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is
    # almost never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are
    # not executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # Adding to the level of hack in this section: in order to support
    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
    # variable, which controls which versioned scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed.
    #     Note that this is the behaviour whenever ENSUREPIP_OPTIONS is set
    #     and is not "altinstall"
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
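
# Example (illustrative sketch, not from the original module): in the RECORD
# rewrite above, rows read from the wheel's own RECORD keep their hash and
# size but get their paths remapped to the installed locations; files changed
# during install (e.g. scripts with rewritten shebangs) are rehashed;
# generated files (script wrappers, INSTALLER) are appended with fresh hashes;
# and any remaining installed files absent from the original RECORD are
# appended with empty hash and size fields.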


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
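
# Example (illustrative sketch, not from the original module): a RECORD row
# naming "foo/bar.py" yields both
#   <dist.location>/foo/bar.py
#   <dist.location>/foo/bar.pyc
# and the @_unique wrapper suppresses any path that RECORD happens to list
# more than once.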


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise (if the version couldn't be parsed or extracted), return False.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except:
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1), and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
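
# Example (illustrative sketch, not from the original module): with
# VERSION_COMPATIBLE = (1, 0),
#   check_compatibility((1, 0), 'foo')  ->  returns silently
#   check_compatibility((1, 2), 'foo')  ->  logs a warning, still installs
#   check_compatibility((2, 0), 'foo')  ->  raises UnsupportedWheel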


class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set(
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        )
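
    # Example (illustrative sketch, not from the original module): parsing
    #   Wheel('pip-8.0.2-py2.py3-none-any.whl')
    # gives name 'pip', version '8.0.2', pyversions ['py2', 'py3'],
    # abis ['none'], plats ['any'], and file_tags
    #   {('py2', 'none', 'any'), ('py3', 'none', 'any')}.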

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list, e.g. if there are 8 supported
        tags and one of the file tags is first in the list, then return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return bool(set(tags).intersection(self.file_tags))


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build
            failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
        finally:
            rmtree(tempd)

    def _base_setup_args(self, req):
        return [
            sys.executable, "-u", '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.source_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: If True, replace the sdist we built from with
            the newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and construct the req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being a wheel.',
                        req.name)
            elif req.editable:
                if not autobuilding:
                    logger.info(
                        'Skipping bdist_wheel for %s, due to being editable',
                        req.name)
            elif autobuilding and req.link and not req.link.is_artifact:
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a
                        # wheel because we'll have no way to look up the
                        # result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

            # notify success/failure
            if build_success:
                logger.info(
                    'Successfully built %s',
                    ' '.join([req.name for req in build_success]),
                )
            if build_failure:
                logger.info(
                    'Failed to build %s',
                    ' '.join([req.name for req in build_failure]),
                )
            # Return True if all builds were successful
            return len(build_failure) == 0