platform for raspberry pi
commit 73d4832b38
523 changed files with 190349 additions and 0 deletions
340
lib/python3.4/site-packages/pip/__init__.py
Normal file
@@ -0,0 +1,340 @@
#!/usr/bin/env python
import os
import optparse

import sys
import re
import errno

# Upstream pip vendorizes a bunch of its dependencies. Debian de-vendorizes
# (unbundles) these dependencies to be compliant with Debian policy. Instead,
# these dependencies are packaged as wheel (.whl) files in a known location.
# When pip itself executes, we have to arrange for these wheels to show up
# earlier on sys.path than any other version of these packages, otherwise
# things can break. See for example Bug #744145.
#
# The location of the wheels differs depending on whether we're inside or
# outside a virtual environment, regardless of whether that venv was created
# with virtualenv or pyvenv. The first thing we have to do is figure out if
# we're inside or outside a venv, then search the appropriate wheel directory
# and add all the .whls found there to the front of sys.path. As per Debian
# Python Policy, only the wheels needed to support this de-vendorization will
# be present, so it's safe to add them all.
#
# venv determination is a bit of a black art, but this algorithm should work
# in both Python 2 (virtualenv-only) and Python 3 (pyvenv and virtualenv). -
# updated by barry@debian.org 2015-02-25

base_prefix = getattr(sys, 'base_prefix', None)
real_prefix = getattr(sys, 'real_prefix', None)
if base_prefix is None:
    # Python 2 has no base_prefix at all. It also has no pyvenv. Fall back
    # to checking real_prefix.
    if real_prefix is None:
        # We are not in a venv.
        in_venv = False
    else:
        # We're in a Python 2 virtualenv created venv, but real_prefix should
        # never be the same as sys.prefix.
        assert sys.prefix != real_prefix
        in_venv = True
elif sys.prefix != base_prefix:
    # We're in a Python 3, pyvenv created venv.
    in_venv = True
elif real_prefix is None:
    # We're in Python 3, outside a venv, but base better equal prefix.
    assert sys.prefix == base_prefix
    in_venv = False
else:
    # We're in a Python 3, virtualenv created venv.
    assert real_prefix != sys.prefix
    in_venv = True


if in_venv:
    wheel_dir = os.path.join(sys.prefix, 'lib', 'python-wheels')
else:
    wheel_dir = '/usr/share/python-wheels'

# We'll add all the wheels we find to the front of sys.path so that they're
# found first, even if the same dependencies are available in site-packages.
try:
    for filename in os.listdir(wheel_dir):
        if os.path.splitext(filename)[1] == '.whl':
            sys.path.insert(0, os.path.join(wheel_dir, filename))
# FileNotFoundError doesn't exist in Python 2, but ignore it anyway.
except OSError as error:
    if error.errno != errno.ENOENT:
        raise


from pip.exceptions import InstallationError, CommandError, PipError
from pip.log import logger
from pip.util import get_installed_distributions, get_prog
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import commands, get_summaries, get_similar_commands

# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "1.5.6"


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash or zsh).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #   args: ['--timeout=5', 'install', '--user', 'INITools']
    #   general_options: ['--timeout=5']
    #   args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0].lower()

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(args_else[0].lower())

    if cmd_name not in commands:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    return cmd_name, cmd_args


def main(initial_args=None):
    if initial_args is None:
        initial_args = sys.argv[1:]

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(initial_args)
    except PipError:
        e = sys.exc_info()[1]
        sys.stderr.write("ERROR: %s" % e)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    command = commands[cmd_name]()
    return command.main(cmd_args)


def bootstrap():
    """
    Bootstrapping function to be called from install-pip.py script.
    """
    pkgs = ['pip']
    try:
        import setuptools
    except ImportError:
        pkgs.append('setuptools')
    return main(['install', '--upgrade'] + pkgs + sys.argv[1:])

############################################################
## Writing freeze files


class FrozenRequirement(object):

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location, find_tags)
            except InstallationError:
                ex = sys.exc_info()[1]
                logger.warn("Error when trying to get requirement for VCS system %s, falling back to uneditable format" % ex)
                req = None
            if req is None:
                logger.warn('Could not determine repository location of %s' % location)
                comments.append('## !! Could not determine repository location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(dist, dependency_links)
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'


if __name__ == '__main__':
    exit = main()
    if exit:
        sys.exit(exit)
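
The sys.path manipulation above works because a .whl file is a zip archive, so Python's built-in zipimport machinery can import pure-Python modules straight out of it. A minimal sketch, not part of this commit, with a hypothetical wheel filename:

import sys

# Hypothetical wheel path; on Debian the real files live in
# /usr/share/python-wheels, as the module above explains.
wheel = '/usr/share/python-wheels/six-1.8.0-py2.py3-none-any.whl'

# Prepending the archive itself to sys.path lets zipimport serve the
# packaged modules before any copy found later in site-packages.
sys.path.insert(0, wheel)

# import six  # would now resolve to the module inside the wheel
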
7
lib/python3.4/site-packages/pip/__main__.py
Normal file
@@ -0,0 +1,7 @@
import sys
from .runner import run

if __name__ == '__main__':
    exit = run()
    if exit:
        sys.exit(exit)
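
This stub is what makes "python -m pip" work: the interpreter executes pip/__main__.py, which defers to pip.runner.run. The same entry point is reachable programmatically through pip.main() from pip/__init__.py above, which returns the command's exit status instead of exiting. A rough sketch (note it performs a real install if run):

import pip

# Equivalent to "pip install --user INITools" from a shell.
status = pip.main(['install', '--user', 'INITools'])
print('pip exited with status %d' % status)
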
138
lib/python3.4/site-packages/pip/backwardcompat/__init__.py
Normal file
@@ -0,0 +1,138 @@
"""Stuff that differs in different Python versions and platform
distributions."""

import os
import imp
import sys
import site

__all__ = ['WindowsError']

uses_pycache = hasattr(imp, 'cache_from_source')

class NeverUsedException(Exception):
    """this exception should never be raised"""

try:
    WindowsError = WindowsError
except NameError:
    WindowsError = NeverUsedException

try:
    # new in Python 3.3
    PermissionError = PermissionError
except NameError:
    PermissionError = NeverUsedException

console_encoding = sys.__stdout__.encoding

if sys.version_info >= (3,):
    from io import StringIO, BytesIO
    from functools import reduce
    from urllib.error import URLError, HTTPError
    from queue import Queue, Empty
    from urllib.request import url2pathname, urlretrieve, pathname2url
    from email import message as emailmessage
    import urllib.parse as urllib
    import urllib.request as urllib2
    import configparser as ConfigParser
    import xmlrpc.client as xmlrpclib
    import urllib.parse as urlparse
    import http.client as httplib

    def cmp(a, b):
        return (a > b) - (a < b)

    def b(s):
        return s.encode('utf-8')

    def u(s):
        return s.decode('utf-8')

    def console_to_str(s):
        try:
            return s.decode(console_encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def get_http_message_param(http_message, param, default_value):
        return http_message.get_param(param, default_value)

    bytes = bytes
    string_types = (str,)
    raw_input = input
else:
    from cStringIO import StringIO
    from urllib2 import URLError, HTTPError
    from Queue import Queue, Empty
    from urllib import url2pathname, urlretrieve, pathname2url
    from email import Message as emailmessage
    import urllib
    import urllib2
    import urlparse
    import ConfigParser
    import xmlrpclib
    import httplib

    def b(s):
        return s

    def u(s):
        return s

    def console_to_str(s):
        return s

    def get_http_message_param(http_message, param, default_value):
        result = http_message.getparam(param)
        return result or default_value

    bytes = str
    string_types = (basestring,)
    reduce = reduce
    cmp = cmp
    raw_input = raw_input
    BytesIO = StringIO


from distutils.sysconfig import get_python_lib, get_python_version

# site.USER_SITE was created in py2.6
user_site = getattr(site, 'USER_SITE', None)


def product(*args, **kwds):
    # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
    pools = list(map(tuple, args)) * kwds.get('repeat', 1)
    result = [[]]
    for pool in pools:
        result = [x + [y] for x in result for y in pool]
    for prod in result:
        yield tuple(prod)


def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in backwardcompat due to differences on AIX and Jython,
    that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        file_uid = os.fstat(fd).st_uid
        os.close(fd)
    else:  # AIX and Jython
        # WARNING: time-of-check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError("%s is a symlink; Will not return uid for symlinks" % path)
    return file_uid
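
The b()/u() pair above gives calling code a single spelling for byte/text conversion on both interpreters: encode/decode on Python 3, identity on Python 2. A small usage sketch, assuming this vendored pip package is importable:

from pip.backwardcompat import b, u, string_types

data = b('sdist')   # bytes on Python 3, str on Python 2
text = u(data)      # back to the native text type either way
assert isinstance(text, string_types)
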
201
lib/python3.4/site-packages/pip/basecommand.py
Normal file
@@ -0,0 +1,201 @@
"""Base Command class, and related routines"""

import os
import sys
import tempfile
import traceback
import time
import optparse

from pip import cmdoptions
from pip.locations import running_under_virtualenv
from pip.log import logger
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
                            CommandError, PreviousBuildDirError)
from pip.backwardcompat import StringIO
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
                              PREVIOUS_BUILD_DIR_ERROR)
from pip.util import get_prog


__all__ = ['Command']


class Command(object):
    name = None
    usage = None
    hidden = False

    def __init__(self):
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser)
        self.parser.add_option_group(gen_opts)

    def _build_session(self, options):
        session = PipSession()

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle timeouts
        if options.timeout:
            session.timeout = options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def setup_logging(self):
        pass

    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        options, args = self.parse_args(args)

        level = 1  # Notify
        level += options.verbose
        level -= options.quiet
        level = logger.level_for_integer(4 - level)
        complete_log = []
        logger.add_consumers(
            (level, sys.stdout),
            (logger.DEBUG, complete_log.append),
        )
        if options.log_explicit_levels:
            logger.explicit_levels = True

        self.setup_logging()

        # TODO: try to get these passing down from the command?
        #       without resorting to os.environ to hold these.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.fatal('Could not find an activated virtualenv (required).')
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.log:
            log_fp = open_logfile(options.log, 'a')
            logger.add_consumers((logger.DEBUG, log_fp))
        else:
            log_fp = None

        exit = SUCCESS
        store_log = False
        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                exit = status
        except PreviousBuildDirError:
            e = sys.exc_info()[1]
            logger.fatal(str(e))
            logger.info('Exception information:\n%s' % format_exc())
            store_log = True
            exit = PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError):
            e = sys.exc_info()[1]
            logger.fatal(str(e))
            logger.info('Exception information:\n%s' % format_exc())
            store_log = True
            exit = ERROR
        except BadCommand:
            e = sys.exc_info()[1]
            logger.fatal(str(e))
            logger.info('Exception information:\n%s' % format_exc())
            store_log = True
            exit = ERROR
        except CommandError:
            e = sys.exc_info()[1]
            logger.fatal('ERROR: %s' % e)
            logger.info('Exception information:\n%s' % format_exc())
            exit = ERROR
        except KeyboardInterrupt:
            logger.fatal('Operation cancelled by user')
            logger.info('Exception information:\n%s' % format_exc())
            store_log = True
            exit = ERROR
        except:
            logger.fatal('Exception:\n%s' % format_exc())
            store_log = True
            exit = UNKNOWN_ERROR
        if store_log:
            log_file_fn = options.log_file
            text = '\n'.join(complete_log)
            try:
                log_file_fp = open_logfile(log_file_fn, 'w')
            except IOError:
                temp = tempfile.NamedTemporaryFile(delete=False)
                log_file_fn = temp.name
                log_file_fp = open_logfile(log_file_fn, 'w')
            logger.fatal('Storing debug log for failure in %s' % log_file_fn)
            log_file_fp.write(text)
            log_file_fp.close()
        if log_fp is not None:
            log_fp.close()
        return exit


def format_exc(exc_info=None):
    if exc_info is None:
        exc_info = sys.exc_info()
    out = StringIO()
    traceback.print_exception(*exc_info, **dict(file=out))
    return out.getvalue()


def open_logfile(filename, mode='a'):
    """Open the named log file in append mode.

    If the file already exists, a separator will also be printed to
    the file to separate past activity from current activity.
    """
    filename = os.path.expanduser(filename)
    filename = os.path.abspath(filename)
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    exists = os.path.exists(filename)

    log_fp = open(filename, mode)
    if exists:
        log_fp.write('%s\n' % ('-' * 60))
        log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
    return log_fp
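
Every pip subcommand follows the same recipe: subclass Command, register options on self.cmd_opts, and implement run(), whose integer return value becomes the process exit status via Command.main(). A minimal hypothetical subcommand (not part of pip) following that pattern:

import optparse

from pip.basecommand import Command
from pip.status_codes import SUCCESS


class PingCommand(Command):
    """Trivial demo command (hypothetical, for illustration only)."""
    name = 'ping'
    usage = '%prog'
    summary = 'Print pong.'
    hidden = True

    def run(self, options, args):
        print('pong')
        return SUCCESS

Wiring it up would additionally require an entry in the commands dict in pip/commands/__init__.py, shown further below.
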
223
lib/python3.4/site-packages/pip/baseparser.py
Normal file
@@ -0,0 +1,223 @@
"""Base option parser setup"""

import sys
import optparse
import os
import textwrap
from distutils.util import strtobool

from pip.backwardcompat import ConfigParser, string_types
from pip.locations import default_config_file
from pip.util import get_terminal_size, get_prog


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser that updates
    the defaults before expanding them, allowing them to show up correctly
    in the help listing"""

    def expand_default(self, option):
        if self.parser is not None:
            self.parser.update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):
    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.config = ConfigParser.RawConfigParser()
        self.name = kwargs.pop('name')
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        if config_file == os.devnull:
            return []
        if config_file and os.path.exists(config_file):
            return [config_file]
        return [default_config_file]

    def check_default(self, option, key, val):
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError:
            e = sys.exc_info()[1]
            print("An error occurred during configuration: %s" % e)
            sys.exit(3)

    def update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(self.normalize_keys(self.get_config_section(section)))
        # 2. environmental variables
        config.update(self.normalize_keys(self.get_environ_vars()))
        # Then set the options with those values
        for key, val in config.items():
            option = self.get_option(key)
            if option is not None:
                # ignore empty values
                if not val:
                    continue
                if option.action in ('store_true', 'store_false', 'count'):
                    val = strtobool(val)
                if option.action == 'append':
                    val = val.split()
                    val = [self.check_default(option, key, v) for v in val]
                else:
                    val = self.check_default(option, key, val)

                defaults[option.dest] = val
        return defaults

    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self, prefix='PIP_'):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith(prefix):
                yield (key.replace(prefix, '').lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        defaults = self.update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
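
ConfigOptionParser layers defaults in a fixed order: the [global] config section, then the command's own section, then PIP_* environment variables, with later sources winning. The key normalization above means PIP_DEFAULT_TIMEOUT=60 in the environment and "default-timeout = 60" in pip.conf both land on the same --default-timeout option. A sketch of that normalization step in isolation:

def normalize(key):
    # mirrors ConfigOptionParser.normalize_keys for a single key
    key = key.replace('_', '-')
    if not key.startswith('--'):
        key = '--%s' % key  # only long options take part
    return key

env_key = 'PIP_DEFAULT_TIMEOUT'.replace('PIP_', '').lower()
print(normalize(env_key))    # --default-timeout
print(normalize('timeout'))  # --timeout (as read from pip.conf)
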
369
lib/python3.4/site-packages/pip/cmdoptions.py
Normal file
@@ -0,0 +1,369 @@
"""
shared options and groups

The principle here is to define options once, but *not* instantiate them globally.
One reason being that options with action='append' can carry state between parses.
pip parses general options twice internally, and shouldn't pass on state.
To be consistent, all options will follow this design.

"""
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.locations import default_log_file


def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option.make())
    return option_group


class OptionMaker(object):
    """Class that stores the args/kwargs that would be used to make an Option,
    for making them later, and uses deepcopy's to reset state."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def make(self):
        args_copy = copy.deepcopy(self.args)
        kwargs_copy = copy.deepcopy(self.kwargs)
        return Option(*args_copy, **kwargs_copy)


###########
# options #
###########

help_ = OptionMaker(
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')

require_virtualenv = OptionMaker(
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

verbose = OptionMaker(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.')

version = OptionMaker(
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')

quiet = OptionMaker(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output.')

log = OptionMaker(
    '--log',
    dest='log',
    metavar='path',
    help='Path to a verbose appending log. This log is inactive by default.')

log_explicit_levels = OptionMaker(
    # Writes the log levels explicitly to the log
    '--log-explicit-levels',
    dest='log_explicit_levels',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

log_file = OptionMaker(
    # The default log file
    '--log-file', '--local-log',
    dest='log_file',
    metavar='path',
    default=default_log_file,
    help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.')

no_input = OptionMaker(
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

proxy = OptionMaker(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")

timeout = OptionMaker(
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')

default_vcs = OptionMaker(
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)

skip_requirements_regex = OptionMaker(
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)

exists_action = OptionMaker(
    # Option when path already exists
    '--exists-action',
    dest='exists_action',
    type='choice',
    choices=['s', 'i', 'w', 'b'],
    default=[],
    action='append',
    metavar='action',
    help="Default action when a path already exists: "
         "(s)witch, (i)gnore, (w)ipe, (b)ackup.")

cert = OptionMaker(
    '--cert',
    dest='cert',
    type='str',
    default='',
    metavar='path',
    help="Path to alternate CA bundle.")

index_url = OptionMaker(
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default='https://pypi.python.org/simple/',
    help='Base URL of Python Package Index (default %default).')

extra_index_url = OptionMaker(
    '--extra-index-url',
    dest='extra_index_urls',
    metavar='URL',
    action='append',
    default=[],
    help='Extra URLs of package indexes to use in addition to --index-url.')

no_index = OptionMaker(
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')

find_links = OptionMaker(
    '-f', '--find-links',
    dest='find_links',
    action='append',
    default=[],
    metavar='url',
    help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.")

# TODO: Remove after 1.6
use_mirrors = OptionMaker(
    '-M', '--use-mirrors',
    dest='use_mirrors',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

# TODO: Remove after 1.6
mirrors = OptionMaker(
    '--mirrors',
    dest='mirrors',
    metavar='URL',
    action='append',
    default=[],
    help=SUPPRESS_HELP)

allow_external = OptionMaker(
    "--allow-external",
    dest="allow_external",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of externally hosted files",
)

allow_all_external = OptionMaker(
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help="Allow the installation of all externally hosted files",
)

# Remove after 1.7
no_allow_external = OptionMaker(
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)

# Remove --allow-insecure after 1.7
allow_unsafe = OptionMaker(
    "--allow-unverified", "--allow-insecure",
    dest="allow_unverified",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of insecure and unverifiable files",
)

# Remove after 1.7
no_allow_unsafe = OptionMaker(
    "--no-allow-insecure",
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)

# Remove after 1.5
process_dependency_links = OptionMaker(
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)

requirements = OptionMaker(
    '-r', '--requirement',
    dest='requirements',
    action='append',
    default=[],
    metavar='file',
    help='Install from the given requirements file. '
         'This option can be used multiple times.')

use_wheel = OptionMaker(
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    help=SUPPRESS_HELP,
)

no_use_wheel = OptionMaker(
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not find and prefer wheel archives when searching indexes and '
          'find-links locations.'),
)

download_cache = OptionMaker(
    '--download-cache',
    dest='download_cache',
    metavar='dir',
    default=None,
    help='Cache downloaded packages in <dir>.')

no_deps = OptionMaker(
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")

build_dir = OptionMaker(
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in.',
)

install_options = OptionMaker(
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
         "Use multiple --install-option options to pass multiple options to setup.py install. "
         "If you are using an option with a directory path, be sure to use an absolute path.")

global_options = OptionMaker(
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.")

no_clean = OptionMaker(
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log_file,
        log,
        log_explicit_levels,
        no_input,
        proxy,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        cert,
    ]
}

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        use_mirrors,
        mirrors,
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
        process_dependency_links,
    ]
}
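
The OptionMaker indirection exists because optparse Option objects with action='append' mutate their default list in place; sharing one instance across the two parses pip performs would leak values from the first parse into the second. Each make() call yields an independent Option, as this sketch (assuming the vendored pip is importable) shows:

from pip import cmdoptions

first = cmdoptions.find_links.make()
second = cmdoptions.find_links.make()

# Each Option gets its own deep-copied default list, so appending to
# one parse's default can never contaminate the next parse.
assert first is not second
assert first.default == second.default == []
assert first.default is not second.default
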
88
lib/python3.4/site-packages/pip/commands/__init__.py
Normal file
@@ -0,0 +1,88 @@
"""
Package containing all pip commands
"""


from pip.commands.bundle import BundleCommand
from pip.commands.completion import CompletionCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.unzip import UnzipCommand
from pip.commands.zip import ZipCommand
from pip.commands.wheel import WheelCommand


commands = {
    BundleCommand.name: BundleCommand,
    CompletionCommand.name: CompletionCommand,
    FreezeCommand.name: FreezeCommand,
    HelpCommand.name: HelpCommand,
    SearchCommand.name: SearchCommand,
    ShowCommand.name: ShowCommand,
    InstallCommand.name: InstallCommand,
    UninstallCommand.name: UninstallCommand,
    UnzipCommand.name: UnzipCommand,
    ZipCommand.name: ZipCommand,
    ListCommand.name: ListCommand,
    WheelCommand.name: WheelCommand,
}


commands_order = [
    InstallCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    SearchCommand,
    WheelCommand,
    ZipCommand,
    UnzipCommand,
    BundleCommand,
    HelpCommand,
]


def get_summaries(ignore_hidden=True, ordered=True):
    """Yields sorted (command name, command summary) tuples."""

    if ordered:
        cmditems = _sort_commands(commands, commands_order)
    else:
        cmditems = commands.items()

    for name, command_class in cmditems:
        if ignore_hidden and command_class.hidden:
            continue

        yield (name, command_class.summary)


def get_similar_commands(name):
    """Command name auto-correct."""
    from difflib import get_close_matches

    close_commands = get_close_matches(name, commands.keys())

    if close_commands:
        guess = close_commands[0]
    else:
        guess = False

    return guess


def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except ValueError:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
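
get_similar_commands is a thin wrapper over difflib.get_close_matches, so the "maybe you meant" suggestion is easy to check in isolation:

from difflib import get_close_matches

command_names = ['install', 'uninstall', 'freeze', 'list', 'show',
                 'search', 'wheel', 'zip', 'unzip', 'bundle', 'help']

# The highest-scoring close match becomes the suggestion.
print(get_close_matches('instal', command_names)[0])  # install
print(get_close_matches('freze', command_names)[0])   # freeze
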
42
lib/python3.4/site-packages/pip/commands/bundle.py
Normal file
@@ -0,0 +1,42 @@
import textwrap
from pip.locations import build_prefix, src_prefix
from pip.util import display_path, backup_dir
from pip.log import logger
from pip.exceptions import InstallationError
from pip.commands.install import InstallCommand


class BundleCommand(InstallCommand):
    """Create pybundles (archives containing multiple packages)."""
    name = 'bundle'
    usage = """
      %prog [options] <bundle name>.pybundle <package>..."""
    summary = 'DEPRECATED. Create pybundles.'
    bundle = True

    def __init__(self, *args, **kw):
        super(BundleCommand, self).__init__(*args, **kw)
        # bundle uses different default source and build dirs
        build_opt = self.parser.get_option("--build")
        build_opt.default = backup_dir(build_prefix, '-bundle')
        src_opt = self.parser.get_option("--src")
        src_opt.default = backup_dir(src_prefix, '-bundle')
        self.parser.set_defaults(**{
            src_opt.dest: src_opt.default,
            build_opt.dest: build_opt.default,
        })

    def run(self, options, args):

        logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. "
                          "See https://github.com/pypa/pip/pull/1046")

        if not args:
            raise InstallationError('You must give a bundle filename')
        # We have to get everything when creating a bundle:
        options.ignore_installed = True
        logger.notify('Putting temporary build files in %s and source/develop files in %s'
                      % (display_path(options.build_dir), display_path(options.src_dir)))
        self.bundle_filename = args.pop(0)
        requirement_set = super(BundleCommand, self).run(options, args)
        return requirement_set
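
BundleCommand reuses InstallCommand's entire option table and only retunes the defaults of two inherited options. The same set_defaults pattern works on any optparse parser, as this self-contained sketch shows:

import optparse

parser = optparse.OptionParser()
parser.add_option('--build', dest='build_dir', default='build')

# Retune an already-registered option's default, as BundleCommand
# does above for --build and --src.
build_opt = parser.get_option('--build')
build_opt.default = 'build-bundle'
parser.set_defaults(**{build_opt.dest: build_opt.default})

options, args = parser.parse_args([])
print(options.build_dir)  # build-bundle
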
59
lib/python3.4/site-packages/pip/commands/completion.py
Normal file
@@ -0,0 +1,59 @@
import sys
from pip.basecommand import Command

BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command to be used for command completion'
    hidden = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)
        self.parser.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        self.parser.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = COMPLETION_SCRIPTS.get(options.shell, '')
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
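
The generated shell functions drive the autocomplete() hook in pip/__init__.py: they export the in-progress command line via COMP_WORDS and COMP_CWORD, set PIP_AUTO_COMPLETE=1, and re-invoke pip, which prints candidate completions and exits. The protocol can be exercised without a shell; a sketch, assuming a pip executable is on PATH (note pip deliberately exits non-zero here):

import os
import subprocess

env = dict(os.environ)
env['PIP_AUTO_COMPLETE'] = '1'
env['COMP_WORDS'] = 'pip ins'  # the line being completed
env['COMP_CWORD'] = '1'        # index of the word under the cursor

proc = subprocess.Popen(['pip'], env=env, stdout=subprocess.PIPE)
out, _ = proc.communicate()
print(out.decode())  # commands starting with "ins": install
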
114
lib/python3.4/site-packages/pip/commands/freeze.py
Normal file
@@ -0,0 +1,114 @@
import re
import sys
import pip

from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
import pkg_resources


class FreezeCommand(Command):
    """Output installed packages in requirements format."""
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirement',
            action='store',
            default=None,
            metavar='file',
            help="Use the order in the given requirements file and its comments when generating output.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output globally-installed packages.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def setup_logging(self):
        logger.move_stdout_to_stderr()

    def run(self, options, args):
        requirement = options.requirement
        find_links = options.find_links or []
        local_only = options.local
        ## FIXME: Obviously this should be settable:
        find_tags = False
        skip_match = None

        skip_regex = options.skip_requirements_regex
        if skip_regex:
            skip_match = re.compile(skip_regex)

        dependency_links = []

        f = sys.stdout

        for dist in pkg_resources.working_set:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
        for link in find_links:
            if '#egg=' in link:
                dependency_links.append(link)
        for link in find_links:
            f.write('-f %s\n' % link)
        installations = {}
        for dist in get_installed_distributions(local_only=local_only):
            req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
            installations[req.name] = req
        if requirement:
            req_f = open(requirement)
            for line in req_f:
                if not line.strip() or line.strip().startswith('#'):
                    f.write(line)
                    continue
                if skip_match and skip_match.search(line):
                    f.write(line)
                    continue
                elif line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
                elif (line.startswith('-r') or line.startswith('--requirement')
                      or line.startswith('-Z') or line.startswith('--always-unzip')
                      or line.startswith('-f') or line.startswith('-i')
                      or line.startswith('--extra-index-url')
                      or line.startswith('--find-links')
                      or line.startswith('--index-url')):
                    f.write(line)
                    continue
                else:
                    line_req = InstallRequirement.from_line(line)
                if not line_req.name:
                    logger.notify("Skipping line because it's not clear what it would install: %s"
                                  % line.strip())
                    logger.notify("  (add #egg=PackageName to the URL to avoid this warning)")
                    continue
                if line_req.name not in installations:
                    logger.warn("Requirement file contains %s, but that package is not installed"
                                % line.strip())
                    continue
                f.write(str(installations[line_req.name]))
                del installations[line_req.name]
            f.write('## The following requirements were added by pip --freeze:\n')
        for installation in sorted(installations.values(), key=lambda x: x.name):
            f.write(str(installation))
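
Each emitted line is the str() of a FrozenRequirement from pip/__init__.py above: any collected comments first, then the requirement itself, with "-e " prefixed for editables. A standalone sketch of just that formatting logic:

class FrozenReq(object):
    # same __str__ logic as pip.FrozenRequirement, isolated for illustration
    def __init__(self, req, editable, comments=()):
        self.req, self.editable, self.comments = req, editable, comments

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'

print(FrozenReq('INITools==0.3.1', False))
# INITools==0.3.1
print(FrozenReq('git+https://example.invalid/x.git#egg=x', True))
# -e git+https://example.invalid/x.git#egg=x
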
33
lib/python3.4/site-packages/pip/commands/help.py
Normal file
@@ -0,0 +1,33 @@
from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'

    def run(self, options, args):
        from pip.commands import commands, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands[cmd_name]()
        command.parser.print_help()

        return SUCCESS
326
lib/python3.4/site-packages/pip/commands/install.py
Normal file
|
|
@ -0,0 +1,326 @@
|
|||
import os
import sys
import tempfile
import shutil
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.log import logger
from pip.locations import (src_prefix, virtualenv_no_global, distutils_scheme,
                           build_prefix)
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError
from pip import cmdoptions
from pip.util import BuildDirectory


class InstallCommand(Command):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'
    bundle = False

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-e', '--editable',
            dest='editables',
            action='append',
            default=[],
            metavar='path/url',
            help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.')

        cmd_opts.add_option(cmdoptions.requirements.make())
        cmd_opts.add_option(cmdoptions.build_dir.make())

        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>.')

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help="Download packages into <dir> instead of installing them, regardless of what's already installed.")

        cmd_opts.add_option(cmdoptions.download_cache.make())

        cmd_opts.add_option(
            '--src', '--source', '--source-dir', '--source-directory',
            dest='src_dir',
            metavar='dir',
            default=src_prefix,
            help='Directory to check out editable projects into. '
            'The default in a virtualenv is "<venv path>/src". '
            'The default for global installs is "<current dir>/src".')

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all packages to the newest available version. '
            'This process is recursive regardless of whether a dependency is already satisfied.')

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
                 'already up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.no_deps.make())

        cmd_opts.add_option(
            '--no-install',
            dest='no_install',
            action='store_true',
            help="DEPRECATED. Download and unpack all packages, but don't actually install them.")

        cmd_opts.add_option(
            '--no-download',
            dest='no_download',
            action="store_true",
            help="DEPRECATED. Don't download any packages, just install the ones already downloaded "
            "(completes an install run with --no-install).")

        cmd_opts.add_option(cmdoptions.install_options.make())
        cmd_opts.add_option(cmdoptions.global_options.make())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help='Install using the user scheme.')

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root directory.")

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel.make())
        cmd_opts.add_option(cmdoptions.no_use_wheel.make())

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help="Include pre-release and development versions. By default, pip only finds stable versions.")

        cmd_opts.add_option(cmdoptions.no_clean.make())

        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this install command.
        This method is meant to be overridden by subclasses, not
        called directly.
        """
        return PackageFinder(find_links=options.find_links,
                             index_urls=index_urls,
                             use_wheel=options.use_wheel,
                             allow_external=options.allow_external,
                             allow_unverified=options.allow_unverified,
                             allow_all_external=options.allow_all_external,
                             allow_all_prereleases=options.pre,
                             process_dependency_links=
                                options.process_dependency_links,
                             session=session,
                             )

    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            options.build_dir or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated. See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True

        # If we have --no-install or --no-download and no --build we use the
        # legacy static build dir
        if (options.build_dir is None
                and (options.no_install or options.no_download)):
            options.build_dir = build_prefix

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                              "--use-mirrors has been deprecated and will be removed"
                              " in the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                              "--mirrors has been deprecated and will be removed in "
                              " the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        build_delete = (not (options.no_clean or options.build_dir))
        with BuildDirectory(options.build_dir, delete=build_delete) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                download_cache=options.download_cache,
                upgrade=options.upgrade,
                as_egg=options.as_egg,
                ignore_installed=options.ignore_installed,
                ignore_dependencies=options.ignore_dependencies,
                force_reinstall=options.force_reinstall,
                use_user_site=options.use_user_site,
                target_dir=temp_target_dir,
                session=session,
                pycompile=options.compile,
            )
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(name, None))
            for name in options.editables:
                requirement_set.add_requirement(
                    InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
            for filename in options.requirements:
                for req in parse_requirements(filename, finder=finder, options=options, session=session):
                    requirement_set.add_requirement(req)
            if not requirement_set.has_requirements:
                opts = {'name': self.name}
                if options.find_links:
                    msg = ('You must give at least one requirement to %(name)s '
                           '(maybe you meant "pip %(name)s %(links)s"?)' %
                           dict(opts, links=' '.join(options.find_links)))
                else:
                    msg = ('You must give at least one requirement '
                           'to %(name)s (see "pip help %(name)s")' % opts)
                logger.warn(msg)
                return

            try:
                if not options.no_download:
                    requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
                else:
                    requirement_set.locate_files()

                if not options.no_install and not self.bundle:
                    requirement_set.install(install_options, global_options, root=options.root_path)
                    installed = ' '.join([req.name for req in
                                          requirement_set.successfully_installed])
                    if installed:
                        logger.notify('Successfully installed %s' % installed)
                elif not self.bundle:
                    downloaded = ' '.join([req.name for req in
                                           requirement_set.successfully_downloaded])
                    if downloaded:
                        logger.notify('Successfully downloaded %s' % downloaded)
                elif self.bundle:
                    requirement_set.create_bundle(self.bundle_filename)
                    logger.notify('Created bundle in %s' % self.bundle_filename)
            except PreviousBuildDirError:
                options.no_clean = True
                raise
            finally:
                # Clean up
                if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                    requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
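# --- editor's sketch (not part of the commit) --------------------------------
# InstallCommand is normally reached through pip's console script; the same
# path can be driven programmatically via pip.main(), which exists in this
# generation of pip. The package name is illustrative.
import pip

pip.main(['install', '--user', 'requests'])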
162
lib/python3.4/site-packages/pip/commands/list.py
Normal file
@@ -0,0 +1,162 @@
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
from pip.index import PackageFinder
from pip.log import logger
from pip.req import InstallRequirement
from pip.util import get_installed_distributions, dist_is_editable
from pip.cmdoptions import make_option_group, index_group


class ListCommand(Command):
    """List installed packages, including editables."""
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    # distributions to skip (python itself is reported by pkg_resources.working_set)
    skip = ['python']

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages (excluding editables)')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages (excluding editables)')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not list globally-installed packages.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help="Include pre-release and development versions. By default, pip only finds stable versions.")

        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(find_links=options.find_links,
                             index_urls=index_urls,
                             allow_external=options.allow_external,
                             allow_unverified=options.allow_unverified,
                             allow_all_external=options.allow_all_external,
                             allow_all_prereleases=options.pre,
                             process_dependency_links=
                                options.process_dependency_links,
                             session=session,
                             )

    def run(self, options, args):
        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        elif options.editable:
            self.run_editables(options)
        else:
            self.run_listing(options)

    def run_outdated(self, options):
        for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
            if remote_version_parsed > dist.parsed_version:
                logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name,
                                                               dist.version, remote_version_raw))

    def find_packages_latests_versions(self, options):
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                              "--use-mirrors has been deprecated and will be removed"
                              " in the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                              "--mirrors has been deprecated and will be removed in "
                              " the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")
            index_urls += options.mirrors

        dependency_links = []
        for dist in get_installed_distributions(local_only=options.local, skip=self.skip):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip)
        for dist in installed_packages:
            req = InstallRequirement.from_line(dist.key, None)
            try:
                link = finder.find_requirement(req, True)

                # If link is None, means installed version is most up-to-date
                if link is None:
                    continue
            except DistributionNotFound:
                continue
            except BestVersionAlreadyInstalled:
                remote_version = req.installed_version
            else:
                # It might be a good idea that link or finder had a public method
                # that returned version
                remote_version = finder._link_package_versions(link, req.name)[0]
                remote_version_raw = remote_version[2]
                remote_version_parsed = remote_version[0]
            yield dist, remote_version_raw, remote_version_parsed

    def run_listing(self, options):
        installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip)
        self.output_package_listing(installed_packages)

    def run_editables(self, options):
        installed_packages = get_installed_distributions(local_only=options.local, editables_only=True)
        self.output_package_listing(installed_packages)

    def output_package_listing(self, installed_packages):
        installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower())
        for dist in installed_packages:
            if dist_is_editable(dist):
                line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location)
            else:
                line = '%s (%s)' % (dist.project_name, dist.version)
            logger.notify(line)

    def run_uptodate(self, options):
        uptodate = []
        for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
            if dist.parsed_version == remote_version_parsed:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
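# --- editor's sketch (not part of the commit) --------------------------------
# The heart of run_outdated() above is a parsed-version comparison;
# pkg_resources.parse_version produces the same ordering that
# dist.parsed_version uses. Values are made up.
from pkg_resources import parse_version

installed, latest = '1.4.1', '1.5'
if parse_version(latest) > parse_version(installed):
    print('pip (Current: %s Latest: %s)' % (installed, latest))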
132
lib/python3.4/site-packages/pip/commands/search.py
Normal file
@@ -0,0 +1,132 @@
import sys
import textwrap

import pip.download

from pip.basecommand import Command, SUCCESS
from pip.util import get_terminal_size
from pip.log import logger
from pip.backwardcompat import xmlrpclib, reduce, cmp
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
import pkg_resources
from distutils.version import StrictVersion, LooseVersion


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '--index',
            dest='index',
            metavar='URL',
            default='https://pypi.python.org/pypi',
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        index_url = options.index

        pypi_hits = self.search(query, index_url)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, index_url):
        pypi = xmlrpclib.ServerProxy(index_url)
        hits = pypi.search({'name': query, 'summary': query}, 'or')
        return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = {}
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']
        score = hit['_pypi_ordering']
        if score is None:
            score = 0

        if name not in packages.keys():
            packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary
                packages[name]['score'] = score

    # each record has a unique name now, so we will convert the dict into a list sorted by score
    package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True)
    return package_list


def print_results(hits, name_column_width=25, terminal_width=None):
    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        if terminal_width is not None:
            # wrap and indent summary to fit terminal
            summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
        line = '%s - %s' % (name.ljust(name_column_width), summary)
        try:
            logger.notify(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                logger.indent += 2
                try:
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.notify('INSTALLED: %s (latest)' % dist.version)
                    else:
                        logger.notify('INSTALLED: %s' % dist.version)
                        logger.notify('LATEST: %s' % latest)
                finally:
                    logger.indent -= 2
        except UnicodeEncodeError:
            pass


def compare_versions(version1, version2):
    try:
        return cmp(StrictVersion(version1), StrictVersion(version2))
    # in case of abnormal version number, fall back to LooseVersion
    except ValueError:
        pass
    try:
        return cmp(LooseVersion(version1), LooseVersion(version2))
    except TypeError:
        # certain LooseVersion comparisons raise due to unorderable types,
        # fall back to string comparison
        return cmp([str(v) for v in LooseVersion(version1).version],
                   [str(v) for v in LooseVersion(version2).version])


def highest_version(versions):
    return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
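# --- editor's sketch (not part of the commit) --------------------------------
# compare_versions() above tries StrictVersion first and degrades gracefully:
# StrictVersion rejects anything non-conforming with ValueError, LooseVersion
# orders most of the rest, and the final fallback stringifies components so
# no pair of them is unorderable.
from distutils.version import LooseVersion, StrictVersion

print(StrictVersion('1.0') < StrictVersion('1.1.2'))    # True
print(LooseVersion('1.0') < LooseVersion('1.0.post1'))  # True: [1, 0] < [1, 0, 'post', 1]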
80
lib/python3.4/site-packages/pip/commands/show.py
Normal file
@@ -0,0 +1,80 @@
import os

from pip.basecommand import Command
from pip.log import logger
import pkg_resources


class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warn('ERROR: Please provide a package name or names.')
            return
        query = args

        results = search_packages_info(query)
        print_results(results, options.files)


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Listing installed files requires
    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed_packages = dict(
        [(p.project_name.lower(), p) for p in pkg_resources.working_set])
    for name in query:
        normalized_name = name.lower()
        if normalized_name in installed_packages:
            dist = installed_packages[normalized_name]
            package = {
                'name': dist.project_name,
                'version': dist.version,
                'location': dist.location,
                'requires': [dep.project_name for dep in dist.requires()],
            }
            filelist = os.path.join(
                dist.location,
                dist.egg_name() + '.egg-info',
                'installed-files.txt')
            if os.path.isfile(filelist):
                package['files'] = filelist
            yield package


def print_results(distributions, list_all_files):
    """
    Print the information from the installed distributions found.
    """
    for dist in distributions:
        logger.notify("---")
        logger.notify("Name: %s" % dist['name'])
        logger.notify("Version: %s" % dist['version'])
        logger.notify("Location: %s" % dist['location'])
        logger.notify("Requires: %s" % ', '.join(dist['requires']))
        if list_all_files:
            logger.notify("Files:")
            if 'files' in dist:
                for line in open(dist['files']):
                    logger.notify("  %s" % line.strip())
            else:
                logger.notify("Cannot locate installed-files.txt")
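# --- editor's sketch (not part of the commit) --------------------------------
# search_packages_info() is a generator, so results stream out one dict per
# installed match; querying 'pip' itself works on any machine that can import
# this checkout.
from pip.commands.show import search_packages_info

for pkg in search_packages_info(['pip']):
    print(pkg['name'], pkg['version'], pkg['location'])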
59
lib/python3.4/site-packages/pip/commands/uninstall.py
Normal file
@@ -0,0 +1,59 @@
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements file. '
            'This option can be used multiple times.')
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        session = self._build_session(options)

        requirement_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            session=session,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name))
        for filename in options.requirements:
            for req in parse_requirements(filename,
                    options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            raise InstallationError('You must give at least one requirement '
                                    'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
        requirement_set.uninstall(auto_confirm=options.yes)
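# --- editor's sketch (not part of the commit) --------------------------------
# The requirements-file path through UninstallCommand.run() above, spelled
# out: parse the file, feed each requirement into a RequirementSet, then
# uninstall. 'reqs.txt' is an illustrative filename.
from pip.download import PipSession
from pip.req import RequirementSet, parse_requirements

session = PipSession()
requirement_set = RequirementSet(build_dir=None, src_dir=None,
                                 download_dir=None, session=session)
for req in parse_requirements('reqs.txt', session=session):
    requirement_set.add_requirement(req)
requirement_set.uninstall(auto_confirm=True)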
7
lib/python3.4/site-packages/pip/commands/unzip.py
Normal file
@@ -0,0 +1,7 @@
from pip.commands.zip import ZipCommand


class UnzipCommand(ZipCommand):
    """Unzip individual packages."""
    name = 'unzip'
    summary = 'DEPRECATED. Unzip individual packages.'
203
lib/python3.4/site-packages/pip/commands/wheel.py
Normal file
@@ -0,0 +1,203 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import os
import sys
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.log import logger
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.util import BuildDirectory, normalize_path
from pip.wheel import WheelBuilder
from pip import cmdoptions

DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')


class WheelCommand(Command):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not recompiling your software during every install.
    For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest.

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=DEFAULT_WHEEL_DIR,
            help="Build wheels into <dir>, where the default is '<cwd>/wheelhouse'.")
        cmd_opts.add_option(cmdoptions.use_wheel.make())
        cmd_opts.add_option(cmdoptions.no_use_wheel.make())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.requirements.make())
        cmd_opts.add_option(cmdoptions.download_cache.make())
        cmd_opts.add_option(cmdoptions.no_deps.make())
        cmd_opts.add_option(cmdoptions.build_dir.make())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
            "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help="Include pre-release and development versions. By default, pip only finds stable versions.")

        cmd_opts.add_option(cmdoptions.no_clean.make())

        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):

        # confirm requirements
        try:
            import wheel.bdist_wheel
        except ImportError:
            if sys.version_info < (3,):
                debian_package = 'python-wheel'
            else:
                debian_package = 'python3-wheel'
            raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: sudo apt-get install %s" % debian_package)

        try:
            import pkg_resources
        except ImportError:
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info support."
                " To fix this, run: pip install --upgrade setuptools"
            )
        else:
            if not hasattr(pkg_resources, 'DistInfoDistribution'):
                raise CommandError(
                    "'pip wheel' requires setuptools >= 0.8 for dist-info "
                    "support. To fix this, run: pip install --upgrade "
                    "setuptools"
                )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                              "--use-mirrors has been deprecated and will be removed"
                              " in the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                              "--mirrors has been deprecated and will be removed in "
                              " the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")
            index_urls += options.mirrors

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        session = self._build_session(options)

        finder = PackageFinder(find_links=options.find_links,
                               index_urls=index_urls,
                               use_wheel=options.use_wheel,
                               allow_external=options.allow_external,
                               allow_unverified=options.allow_unverified,
                               allow_all_external=options.allow_all_external,
                               allow_all_prereleases=options.pre,
                               process_dependency_links=
                                options.process_dependency_links,
                               session=session,
                               )

        build_delete = (not (options.no_clean or options.build_dir))
        with BuildDirectory(options.build_dir, delete=build_delete) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=None,
                download_dir=None,
                download_cache=options.download_cache,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=True,
                session=session,
                wheel_download_dir=options.wheel_dir
            )

            # make the wheelhouse
            if not os.path.exists(options.wheel_dir):
                os.makedirs(options.wheel_dir)

            # parse args and/or requirements files
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(name, None))

            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        finder=finder,
                        options=options,
                        session=session):
                    if req.editable:
                        logger.notify("ignoring %s" % req.url)
                        continue
                    requirement_set.add_requirement(req)

            # fail if no requirements
            if not requirement_set.has_requirements:
                opts = {'name': self.name}
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
                logger.error(msg)
                return

            try:
                # build wheels
                wb = WheelBuilder(
                    requirement_set,
                    finder,
                    options.wheel_dir,
                    build_options=options.build_options or [],
                    global_options=options.global_options or [],
                )
                wb.build()
            except PreviousBuildDirError:
                options.no_clean = True
                raise
            finally:
                if not options.no_clean:
                    requirement_set.cleanup_files()
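# --- editor's sketch (not part of the commit) --------------------------------
# The workflow WheelCommand enables: build a local wheelhouse once, then
# install from it without touching an index. Driven here through pip.main(),
# which exists in this generation of pip; the package name is illustrative.
import pip

pip.main(['wheel', '--wheel-dir', './wheelhouse', 'requests'])
pip.main(['install', '--no-index', '--find-links', './wheelhouse', 'requests'])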
351
lib/python3.4/site-packages/pip/commands/zip.py
Normal file
@@ -0,0 +1,351 @@
import sys
import re
import fnmatch
import os
import shutil
import zipfile
from pip.util import display_path, backup_dir, rmtree
from pip.log import logger
from pip.exceptions import InstallationError
from pip.basecommand import Command


class ZipCommand(Command):
    """Zip individual packages."""
    name = 'zip'
    usage = """
      %prog [options] <package> ..."""
    summary = 'DEPRECATED. Zip individual packages.'

    def __init__(self, *args, **kw):
        super(ZipCommand, self).__init__(*args, **kw)
        if self.name == 'zip':
            self.cmd_opts.add_option(
                '--unzip',
                action='store_true',
                dest='unzip',
                help='Unzip (rather than zip) a package.')
        else:
            self.cmd_opts.add_option(
                '--zip',
                action='store_false',
                dest='unzip',
                default=True,
                help='Zip (rather than unzip) a package.')
        self.cmd_opts.add_option(
            '--no-pyc',
            action='store_true',
            dest='no_pyc',
            help='Do not include .pyc files in zip files (useful on Google App Engine).')
        self.cmd_opts.add_option(
            '-l', '--list',
            action='store_true',
            dest='list',
            help='List the packages available, and their zip status.')
        self.cmd_opts.add_option(
            '--sort-files',
            action='store_true',
            dest='sort_files',
            help='With --list, sort packages according to how many files they contain.')
        self.cmd_opts.add_option(
            '--path',
            action='append',
            dest='paths',
            help='Restrict operations to the given paths (may include wildcards).')
        self.cmd_opts.add_option(
            '-n', '--simulate',
            action='store_true',
            help='Do not actually perform the zip/unzip operation.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def paths(self):
        """All the entries of sys.path, possibly restricted by --path"""
        if not self.select_paths:
            return sys.path
        result = []
        match_any = set()
        for path in sys.path:
            path = os.path.normcase(os.path.abspath(path))
            for match in self.select_paths:
                match = os.path.normcase(os.path.abspath(match))
                if '*' in match:
                    if re.search(fnmatch.translate(match + '*'), path):
                        result.append(path)
                        match_any.add(match)
                        break
                else:
                    if path.startswith(match):
                        result.append(path)
                        match_any.add(match)
                        break
            else:
                logger.debug("Skipping path %s because it doesn't match %s"
                             % (path, ', '.join(self.select_paths)))
        for match in self.select_paths:
            if match not in match_any and '*' not in match:
                result.append(match)
                logger.debug("Adding path %s because it doesn't match "
                             "anything already on sys.path" % match)
        return result

    def run(self, options, args):

        logger.deprecated('1.7', "DEPRECATION: 'pip zip' and 'pip unzip' are deprecated, and will be removed in a future release.")

        self.select_paths = options.paths
        self.simulate = options.simulate
        if options.list:
            return self.list(options, args)
        if not args:
            raise InstallationError(
                'You must give at least one package to zip or unzip')
        packages = []
        for arg in args:
            module_name, filename = self.find_package(arg)
            if options.unzip and os.path.isdir(filename):
                raise InstallationError(
                    'The module %s (in %s) is not a zip file; cannot be unzipped'
                    % (module_name, filename))
            elif not options.unzip and not os.path.isdir(filename):
                raise InstallationError(
                    'The module %s (in %s) is not a directory; cannot be zipped'
                    % (module_name, filename))
            packages.append((module_name, filename))
        last_status = None
        for module_name, filename in packages:
            if options.unzip:
                last_status = self.unzip_package(module_name, filename)
            else:
                last_status = self.zip_package(module_name, filename, options.no_pyc)
        return last_status

    def unzip_package(self, module_name, filename):
        zip_filename = os.path.dirname(filename)
        if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
            raise InstallationError(
                'Module %s (in %s) isn\'t located in a zip file in %s'
                % (module_name, filename, zip_filename))
        package_path = os.path.dirname(zip_filename)
        if not package_path in self.paths():
            logger.warn(
                'Unpacking %s into %s, but %s is not on sys.path'
                % (display_path(zip_filename), display_path(package_path),
                   display_path(package_path)))
        logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
        if self.simulate:
            logger.notify('Skipping remaining operations because of --simulate')
            return
        logger.indent += 2
        try:
            ## FIXME: this should be undoable:
            zip = zipfile.ZipFile(zip_filename)
            to_save = []
            for info in zip.infolist():
                name = info.filename
                if name.startswith(module_name + os.path.sep):
                    content = zip.read(name)
                    dest = os.path.join(package_path, name)
                    if not os.path.exists(os.path.dirname(dest)):
                        os.makedirs(os.path.dirname(dest))
                    if not content and dest.endswith(os.path.sep):
                        if not os.path.exists(dest):
                            os.makedirs(dest)
                    else:
                        f = open(dest, 'wb')
                        f.write(content)
                        f.close()
                else:
                    to_save.append((name, zip.read(name)))
            zip.close()
            if not to_save:
                logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
                os.unlink(zip_filename)
                self.remove_filename_from_pth(zip_filename)
            else:
                logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
                zip = zipfile.ZipFile(zip_filename, 'w')
                for name, content in to_save:
                    zip.writestr(name, content)
                zip.close()
        finally:
            logger.indent -= 2

    def zip_package(self, module_name, filename, no_pyc):
        orig_filename = filename
        logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
        logger.indent += 2
        if filename.endswith('.egg'):
            dest_filename = filename
        else:
            dest_filename = filename + '.zip'
        try:
            ## FIXME: I think this needs to be undoable:
            if filename == dest_filename:
                filename = backup_dir(orig_filename)
                logger.notify('Moving %s aside to %s' % (orig_filename, filename))
                if not self.simulate:
                    shutil.move(orig_filename, filename)
            try:
                logger.info('Creating zip file in %s' % display_path(dest_filename))
                if not self.simulate:
                    zip = zipfile.ZipFile(dest_filename, 'w')
                    zip.writestr(module_name + '/', '')
                    for dirpath, dirnames, filenames in os.walk(filename):
                        if no_pyc:
                            filenames = [f for f in filenames
                                         if not f.lower().endswith('.pyc')]
                        for fns, is_dir in [(dirnames, True), (filenames, False)]:
                            for fn in fns:
                                full = os.path.join(dirpath, fn)
                                dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
                                if is_dir:
                                    zip.writestr(dest + '/', '')
                                else:
                                    zip.write(full, dest)
                    zip.close()
                logger.info('Removing old directory %s' % display_path(filename))
                if not self.simulate:
                    rmtree(filename)
            except:
                ## FIXME: need to do an undo here
                raise
            ## FIXME: should also be undone:
            self.add_filename_to_pth(dest_filename)
        finally:
            logger.indent -= 2

    def remove_filename_from_pth(self, filename):
        for pth in self.pth_files():
            f = open(pth, 'r')
            lines = f.readlines()
            f.close()
            new_lines = [
                l for l in lines if l.strip() != filename]
            if lines != new_lines:
                logger.info('Removing reference to %s from .pth file %s'
                            % (display_path(filename), display_path(pth)))
                if not [line for line in new_lines if line]:
                    logger.info('%s file would be empty: deleting' % display_path(pth))
                    if not self.simulate:
                        os.unlink(pth)
                else:
                    if not self.simulate:
                        f = open(pth, 'wb')
                        f.writelines(new_lines)
                        f.close()
                return
        logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))

    def add_filename_to_pth(self, filename):
        path = os.path.dirname(filename)
        dest = filename + '.pth'
        if path not in self.paths():
            logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
        if not self.simulate:
            if os.path.exists(dest):
                f = open(dest)
                lines = f.readlines()
                f.close()
                if lines and not lines[-1].endswith('\n'):
                    lines[-1] += '\n'
                lines.append(filename + '\n')
            else:
                lines = [filename + '\n']
            f = open(dest, 'wb')
            f.writelines(lines)
            f.close()

    def pth_files(self):
        for path in self.paths():
            if not os.path.exists(path) or not os.path.isdir(path):
                continue
            for filename in os.listdir(path):
                if filename.endswith('.pth'):
                    yield os.path.join(path, filename)

    def find_package(self, package):
        for path in self.paths():
            full = os.path.join(path, package)
            if os.path.exists(full):
                return package, full
            if not os.path.isdir(path) and zipfile.is_zipfile(path):
                zip = zipfile.ZipFile(path, 'r')
                try:
                    zip.read(os.path.join(package, '__init__.py'))
                except KeyError:
                    pass
                else:
                    zip.close()
                    return package, full
                zip.close()
        ## FIXME: need special error for package.py case:
        raise InstallationError(
            'No package with the name %s found' % package)

    def list(self, options, args):
        if args:
            raise InstallationError(
                'You cannot give an argument with --list')
        for path in sorted(self.paths()):
            if not os.path.exists(path):
                continue
            basename = os.path.basename(path.rstrip(os.path.sep))
            if os.path.isfile(path) and zipfile.is_zipfile(path):
                if os.path.dirname(path) not in self.paths():
                    logger.notify('Zipped egg: %s' % display_path(path))
                continue
            if (basename != 'site-packages' and basename != 'dist-packages'
                    and not path.replace('\\', '/').endswith('lib/python')):
                continue
            logger.notify('In %s:' % display_path(path))
            logger.indent += 2
            zipped = []
            unzipped = []
            try:
                for filename in sorted(os.listdir(path)):
                    ext = os.path.splitext(filename)[1].lower()
                    if ext in ('.pth', '.egg-info', '.egg-link'):
                        continue
                    if ext == '.py':
                        logger.info('Not displaying %s: not a package' % display_path(filename))
                        continue
                    full = os.path.join(path, filename)
                    if os.path.isdir(full):
                        unzipped.append((filename, self.count_package(full)))
                    elif zipfile.is_zipfile(full):
                        zipped.append(filename)
                    else:
                        logger.info('Unknown file: %s' % display_path(filename))
                if zipped:
                    logger.notify('Zipped packages:')
                    logger.indent += 2
                    try:
                        for filename in zipped:
                            logger.notify(filename)
                    finally:
                        logger.indent -= 2
                else:
                    logger.notify('No zipped packages.')
                if unzipped:
                    if options.sort_files:
                        unzipped.sort(key=lambda x: -x[1])
                    logger.notify('Unzipped packages:')
                    logger.indent += 2
                    try:
                        for filename, count in unzipped:
                            logger.notify('%s (%i files)' % (filename, count))
                    finally:
                        logger.indent -= 2
                else:
                    logger.notify('No unzipped packages.')
            finally:
                logger.indent -= 2

    def count_package(self, path):
        total = 0
        for dirpath, dirnames, filenames in os.walk(path):
            filenames = [f for f in filenames
                         if not f.lower().endswith('.pyc')]
            total += len(filenames)
        return total
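# --- editor's sketch (not part of the commit) --------------------------------
# The wildcard branch of paths() above relies on fnmatch.translate turning a
# shell-style pattern into a regex to test sys.path entries against; note the
# extra '*' appended so the pattern also matches longer paths. Paths are
# illustrative.
import fnmatch
import re

pattern = fnmatch.translate('/usr/lib/python*' + '*')
print(bool(re.search(pattern, '/usr/lib/python3.4/site-packages')))  # True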
644
lib/python3.4/site-packages/pip/download.py
Normal file
@@ -0,0 +1,644 @@
import cgi
|
||||
import email.utils
|
||||
import hashlib
|
||||
import getpass
|
||||
import mimetypes
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import pip
|
||||
|
||||
from pip.backwardcompat import urllib, urlparse, raw_input
|
||||
from pip.exceptions import InstallationError, HashMismatch
|
||||
from pip.util import (splitext, rmtree, format_size, display_path,
|
||||
backup_dir, ask_path_exists, unpack_file,
|
||||
create_download_cache_folder, cache_download)
|
||||
from pip.vcs import vcs
|
||||
from pip.log import logger
|
||||
import requests, six
|
||||
from requests.adapters import BaseAdapter
|
||||
from requests.auth import AuthBase, HTTPBasicAuth
|
||||
from requests.compat import IncompleteRead
|
||||
from requests.exceptions import InvalidURL, ChunkedEncodingError
|
||||
from requests.models import Response
|
||||
from requests.structures import CaseInsensitiveDict
|
||||
|
||||
__all__ = ['get_file_content',
|
||||
'is_url', 'url_to_path', 'path_to_url',
|
||||
'is_archive_file', 'unpack_vcs_link',
|
||||
'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
|
||||
|
||||
|
||||
def user_agent():
|
||||
"""Return a string representing the user agent."""
|
||||
_implementation = platform.python_implementation()
|
||||
|
||||
if _implementation == 'CPython':
|
||||
_implementation_version = platform.python_version()
|
||||
elif _implementation == 'PyPy':
|
||||
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
|
||||
sys.pypy_version_info.minor,
|
||||
sys.pypy_version_info.micro)
|
||||
if sys.pypy_version_info.releaselevel != 'final':
|
||||
_implementation_version = ''.join([
|
||||
_implementation_version,
|
||||
sys.pypy_version_info.releaselevel,
|
||||
])
|
||||
elif _implementation == 'Jython':
|
||||
_implementation_version = platform.python_version() # Complete Guess
|
||||
elif _implementation == 'IronPython':
|
||||
_implementation_version = platform.python_version() # Complete Guess
|
||||
else:
|
||||
_implementation_version = 'Unknown'
|
||||
|
||||
try:
|
||||
p_system = platform.system()
|
||||
p_release = platform.release()
|
||||
except IOError:
|
||||
p_system = 'Unknown'
|
||||
p_release = 'Unknown'
|
||||
|
||||
return " ".join(['pip/%s' % pip.__version__,
|
||||
'%s/%s' % (_implementation, _implementation_version),
|
||||
'%s/%s' % (p_system, p_release)])
|
||||
|
||||
|
||||
class MultiDomainBasicAuth(AuthBase):
|
||||
|
||||
def __init__(self, prompting=True):
|
||||
self.prompting = prompting
|
||||
self.passwords = {}
|
||||
|
||||
def __call__(self, req):
|
||||
parsed = urlparse.urlparse(req.url)
|
||||
|
||||
# Get the netloc without any embedded credentials
|
||||
netloc = parsed.netloc.split("@", 1)[-1]
|
||||
|
||||
# Set the url of the request to the url without any credentials
|
||||
req.url = urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
|
||||
|
||||
# Use any stored credentials that we have for this netloc
|
||||
username, password = self.passwords.get(netloc, (None, None))
|
||||
|
||||
# Extract credentials embedded in the url if we have none stored
|
||||
if username is None:
|
||||
username, password = self.parse_credentials(parsed.netloc)
|
||||
|
||||
if username or password:
|
||||
# Store the username and password
|
||||
self.passwords[netloc] = (username, password)
|
||||
|
||||
# Send the basic auth with this request
|
||||
req = HTTPBasicAuth(username or "", password or "")(req)
|
||||
|
||||
# Attach a hook to handle 401 responses
|
||||
req.register_hook("response", self.handle_401)
|
||||
|
||||
return req
|
||||
|
||||
def handle_401(self, resp, **kwargs):
|
||||
# We only care about 401 responses, anything else we want to just
|
||||
# pass through the actual response
|
||||
if resp.status_code != 401:
|
||||
return resp
|
||||
|
||||
# We are not able to prompt the user so simple return the response
|
||||
if not self.prompting:
|
||||
return resp
|
||||
|
||||
parsed = urlparse.urlparse(resp.url)
|
||||
|
||||
# Prompt the user for a new username and password
|
||||
username = raw_input("User for %s: " % parsed.netloc)
|
||||
password = getpass.getpass("Password: ")
|
||||
|
||||
# Store the new username and password to use for future requests
|
||||
if username or password:
|
||||
self.passwords[parsed.netloc] = (username, password)
|
||||
|
||||
# Consume content and release the original connection to allow our new
|
||||
# request to reuse the same one.
|
||||
resp.content
|
||||
resp.raw.release_conn()
|
||||
|
||||
# Add our new username and password to the request
|
||||
req = HTTPBasicAuth(username or "", password or "")(resp.request)
|
||||
|
||||
# Send our new request
|
||||
new_resp = resp.connection.send(req, **kwargs)
|
||||
new_resp.history.append(resp)
|
||||
|
||||
return new_resp
|
||||
|
||||
def parse_credentials(self, netloc):
|
||||
if "@" in netloc:
|
||||
userinfo = netloc.rsplit("@", 1)[0]
|
||||
if ":" in userinfo:
|
||||
return userinfo.split(":", 1)
|
||||
return userinfo, None
|
||||
return None, None
|
||||
|
||||
|
||||
class LocalFSResponse(object):
|
||||
|
||||
def __init__(self, fileobj):
|
||||
self.fileobj = fileobj
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.fileobj, name)
|
||||
|
||||
def read(self, amt=None, decode_content=None, cache_content=False):
|
||||
return self.fileobj.read(amt)
|
||||
|
||||
# Insert Hacks to Make Cookie Jar work w/ Requests
|
||||
@property
|
||||
def _original_response(self):
|
||||
class FakeMessage(object):
|
||||
def getheaders(self, header):
|
||||
return []
|
||||
|
||||
def get_all(self, header, default):
|
||||
return []
|
||||
|
||||
class FakeResponse(object):
|
||||
@property
|
||||
def msg(self):
|
||||
return FakeMessage()
|
||||
|
||||
return FakeResponse()
|
||||
|
||||
|
||||
class LocalFSAdapter(BaseAdapter):
|
||||
|
||||
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
|
||||
proxies=None):
|
||||
parsed_url = urlparse.urlparse(request.url)
|
||||
|
||||
# We only work for requests with a host of localhost
|
||||
if parsed_url.netloc.lower() != "localhost":
|
||||
raise InvalidURL("Invalid URL %r: Only localhost is allowed" %
|
||||
request.url)
|
||||
|
||||
real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:])
|
||||
pathname = url_to_path(real_url)
|
||||
|
||||
resp = Response()
|
||||
resp.status_code = 200
|
||||
resp.url = real_url
|
||||
|
||||
stats = os.stat(pathname)
|
||||
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
|
||||
resp.headers = CaseInsensitiveDict({
|
||||
"Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
|
||||
"Content-Length": stats.st_size,
|
||||
"Last-Modified": modified,
|
||||
})
|
||||
|
||||
resp.raw = LocalFSResponse(open(pathname, "rb"))
|
||||
resp.close = resp.raw.close
|
||||
|
||||
return resp
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
|
||||
class PipSession(requests.Session):
|
||||
|
||||
timeout = None
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(PipSession, self).__init__(*args, **kwargs)
|
||||
|
||||
# Attach our User Agent to the request
|
||||
self.headers["User-Agent"] = user_agent()
|
||||
|
||||
# Attach our Authentication handler to the session
|
||||
self.auth = MultiDomainBasicAuth()
|
||||
|
||||
# Enable file:// urls
|
||||
self.mount("file://", LocalFSAdapter())
|
||||
|
||||
def request(self, method, url, *args, **kwargs):
|
||||
# Make file:// urls not fail due to lack of a hostname
|
||||
parsed = urlparse.urlparse(url)
|
||||
if parsed.scheme == "file":
|
||||
url = urlparse.urlunparse(parsed[:1] + ("localhost",) + parsed[2:])
|
||||
|
||||
# Allow setting a default timeout on a session
|
||||
kwargs.setdefault("timeout", self.timeout)
|
||||
|
||||
# Dispatch the actual request
|
||||
return super(PipSession, self).request(method, url, *args, **kwargs)
|
||||
|
||||
|


def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode."""
    if session is None:
        session = PipSession()

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from
                and comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            ## FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()

            if six.PY3:
                return resp.url, resp.text
            else:
                return resp.url, resp.content
    try:
        f = open(url)
        content = f.read()
    except IOError:
        e = sys.exc_info()[1]
        raise InstallationError('Could not open requirements file: %s'
                                % str(e))
    else:
        f.close()
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
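

# Editor's note: a small usage sketch (not part of upstream pip; the file
# name is illustrative). get_file_content() accepts a plain path, a file:
# URL, or an http(s) URL, and always returns a (location, text) pair:
def _example_get_file_content():
    location, text = get_file_content('file:///tmp/requirements.txt')
    return location, text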


def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    path = url[len('file:'):].lstrip('/')
    path = urllib.unquote(path)
    if _url_drive_re.match(path):
        path = path[0] + ':' + path[2:]
    else:
        path = '/' + path
    return path


_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)


def path_to_url(path):
    """
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    drive, path = os.path.splitdrive(path)
    filepath = path.split(os.path.sep)
    url = '/'.join([urllib.quote(part) for part in filepath])
    if not drive:
        url = url.lstrip('/')
    return 'file:///' + drive + url
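

# Editor's note: an illustrative round trip (not part of upstream pip). On a
# POSIX system the two helpers above invert each other:
def _example_url_path_round_trip():
    url = path_to_url('/tmp/my project')   # -> 'file:///tmp/my%20project'
    return url_to_path(url)                # -> '/tmp/my project'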


def is_archive_file(name):
    """Return True if `name` is considered an archive file."""
    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle',
                '.whl')
    ext = splitext(name)[1].lower()
    if ext in archives:
        return True
    return False


def unpack_vcs_link(link, location, only_download=False):
    vcs_backend = _get_used_vcs_backend(link)
    if only_download:
        vcs_backend.export(location)
    else:
        vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    return link.url.lower().startswith('file:')


def _check_hash(download_hash, link):
    if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
        logger.fatal("Hash digest size of the package %d (%s) doesn't match "
                     "the expected hash name %s!"
                     % (download_hash.digest_size, link, link.hash_name))
        raise HashMismatch('Hash name mismatch for package %s' % link)
    if download_hash.hexdigest() != link.hash:
        logger.fatal("Hash of the package %s (%s) doesn't match the expected "
                     "hash %s!"
                     % (link, download_hash.hexdigest(), link.hash))
        raise HashMismatch('Bad %s hash for package %s'
                           % (link.hash_name, link))


def _get_hash_from_file(target_file, link):
    try:
        download_hash = hashlib.new(link.hash_name)
    except (ValueError, TypeError):
        logger.warn("Unsupported hash name %s for package %s"
                    % (link.hash_name, link))
        return None

    fp = open(target_file, 'rb')
    while True:
        chunk = fp.read(4096)
        if not chunk:
            break
        download_hash.update(chunk)
    fp.close()
    return download_hash
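

# Editor's note: a standalone equivalent of the hashing loop above
# (illustrative file name; not part of upstream pip). Reading in fixed-size
# chunks keeps memory use flat for arbitrarily large archives:
def _example_file_digest(path='example.tar.gz', algorithm='sha256'):
    import hashlib
    digest = hashlib.new(algorithm)
    with open(path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(4096), b''):
            digest.update(chunk)
    return digest.hexdigest()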


def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warn("Unsupported hash name %s for package %s"
                        % (link.hash_name, link))
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            ## FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress('Downloading %s (%s): '
                                      % (show_url, format_size(total_length)))
            else:
                logger.start_progress('Downloading %s (unknown size): '
                                      % show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.info('Downloading from URL %s' % link)

        def resp_read(chunk_size):
            try:
                # Special case for urllib3.
                try:
                    for chunk in resp.raw.stream(
                            chunk_size, decode_content=False):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = resp.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

        for chunk in resp_read(4096):
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress(
                        '%3i%%  %s' % (100 * downloaded / total_length,
                                       format_size(downloaded)))
            if download_hash is not None:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash


def _copy_file(filename, location, content_type, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
            display_path(download_location), ('i', 'w', 'b'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warn('Deleting %s' % display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warn('Backing up %s to %s'
                        % (display_path(download_location),
                           display_path(dest_file)))
            shutil.move(download_location, dest_file)
    if copy:
        shutil.copy(filename, download_location)
        logger.notify('Saved %s' % display_path(download_location))


def unpack_http_url(link, location, download_cache, download_dir=None,
                    session=None):
    if session is None:
        session = PipSession()

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    temp_location = None
    target_url = link.url.split('#', 1)[0]
    already_cached = False
    cache_file = None
    cache_content_type_file = None
    download_hash = None

    # If a download cache is specified, is the file cached there?
    if download_cache:
        cache_file = os.path.join(download_cache,
                                  urllib.quote(target_url, ''))
        cache_content_type_file = cache_file + '.content-type'
        already_cached = (
            os.path.exists(cache_file) and
            os.path.exists(cache_content_type_file)
        )
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded = None
    if download_dir:
        already_downloaded = os.path.join(download_dir, link.filename)
        if not os.path.exists(already_downloaded):
            already_downloaded = None

    # If already downloaded, does its hash match?
    if already_downloaded:
        temp_location = already_downloaded
        content_type = mimetypes.guess_type(already_downloaded)[0]
        logger.notify('File was already downloaded %s' % already_downloaded)
        if link.hash:
            download_hash = _get_hash_from_file(temp_location, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn(
                    'Previously-downloaded file %s has bad hash, '
                    're-downloading.' % temp_location
                )
                temp_location = None
                os.unlink(already_downloaded)
                already_downloaded = None

    # If not a valid download, let's confirm the cached file is valid
    if already_cached and not temp_location:
        with open(cache_content_type_file) as fp:
            content_type = fp.read().strip()
        temp_location = cache_file
        logger.notify('Using download cache from %s' % cache_file)
        if link.hash and link.hash_name:
            download_hash = _get_hash_from_file(cache_file, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn(
                    'Cached file %s has bad hash, '
                    're-downloading.' % temp_location
                )
                temp_location = None
                os.unlink(cache_file)
                os.unlink(cache_content_type_file)
                already_cached = False

    # We have neither a cached nor a downloaded copy,
    # so let's download to a tmp dir.
    if not temp_location:
        try:
            resp = session.get(target_url, stream=True)
            resp.raise_for_status()
        except requests.HTTPError as exc:
            logger.fatal("HTTP error %s while getting %s" %
                         (exc.response.status_code, link))
            raise

        content_type = resp.headers.get('content-type', '')
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.headers.get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty"
            # value from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != resp.url:
            ext = os.path.splitext(resp.url)[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.hash and link.hash_name:
            _check_hash(download_hash, link)

    # A download dir is specified; let's copy the archive there.
    if download_dir and not already_downloaded:
        _copy_file(temp_location, download_dir, content_type, link)

    # Unpack the archive to the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    unpack_file(temp_location, location, content_type, link)

    # If using a download cache, cache it, if needed.
    if cache_file and not already_cached:
        cache_download(cache_file, temp_location, content_type)

    if not (already_cached or already_downloaded):
        os.unlink(temp_location)

    os.rmdir(temp_dir)


def unpack_file_url(link, location, download_dir=None):

    link_path = url_to_path(link.url_without_fragment)
    already_downloaded = False

    # If it's a url to a local directory
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        return

    # If the link has a hash, let's confirm it matches.
    if link.hash:
        link_path_hash = _get_hash_from_file(link_path, link)
        _check_hash(link_path_hash, link)

    # If a download dir is specified, is the file already there and valid?
    if download_dir:
        download_path = os.path.join(download_dir, link.filename)
        if os.path.exists(download_path):
            content_type = mimetypes.guess_type(download_path)[0]
            logger.notify('File was already downloaded %s' % download_path)
            if link.hash:
                download_hash = _get_hash_from_file(download_path, link)
                try:
                    _check_hash(download_hash, link)
                    already_downloaded = True
                except HashMismatch:
                    logger.warn(
                        'Previously-downloaded file %s has bad hash, '
                        're-downloading.' % link_path
                    )
                    os.unlink(download_path)
            else:
                already_downloaded = True

    if already_downloaded:
        from_path = download_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # Unpack the archive to the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    unpack_file(from_path, location, content_type, link)

    # A download dir is specified and not already downloaded.
    if download_dir and not already_downloaded:
        _copy_file(from_path, download_dir, content_type, link)

46
lib/python3.4/site-packages/pip/exceptions.py
Normal file
@ -0,0 +1,46 @@

"""Exceptions used throughout package"""
|
||||
|
||||
|
||||
class PipError(Exception):
|
||||
"""Base pip exception"""
|
||||
|
||||
|
||||
class InstallationError(PipError):
|
||||
"""General exception during installation"""
|
||||
|
||||
|
||||
class UninstallationError(PipError):
|
||||
"""General exception during uninstallation"""
|
||||
|
||||
|
||||
class DistributionNotFound(InstallationError):
|
||||
"""Raised when a distribution cannot be found to satisfy a requirement"""
|
||||
|
||||
|
||||
class BestVersionAlreadyInstalled(PipError):
|
||||
"""Raised when the most up-to-date version of a package is already
|
||||
installed. """
|
||||
|
||||
|
||||
class BadCommand(PipError):
|
||||
"""Raised when virtualenv or a command is not found"""
|
||||
|
||||
|
||||
class CommandError(PipError):
|
||||
"""Raised when there is an error in command-line arguments"""
|
||||
|
||||
|
||||
class PreviousBuildDirError(PipError):
|
||||
"""Raised when there's a previous conflicting build directory"""
|
||||
|
||||
|
||||
class HashMismatch(InstallationError):
|
||||
"""Distribution file hash values don't match."""
|
||||
|
||||
|
||||
class InvalidWheelFilename(InstallationError):
|
||||
"""Invalid wheel filename."""
|
||||
|
||||
|
||||
class UnsupportedWheel(InstallationError):
|
||||
"""Unsupported wheel."""
|
||||
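

# Editor's note: an illustrative sketch (not part of upstream pip) of why the
# hierarchy above matters: catching PipError also catches every subclass,
# while specific handlers can still special-case, e.g., hash failures.
def _example_exception_handling(run_install):
    try:
        run_install()
    except HashMismatch as exc:
        print('refusing to install, bad hash: %s' % exc)
    except PipError as exc:
        print('pip failed: %s' % exc)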

990
lib/python3.4/site-packages/pip/index.py
Normal file
@ -0,0 +1,990 @@

"""Routines related to PyPI, indexes"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import mimetypes
|
||||
import posixpath
|
||||
|
||||
from pip.log import logger
|
||||
from pip.util import Inf, normalize_name, splitext, is_prerelease
|
||||
from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled,
|
||||
InstallationError, InvalidWheelFilename, UnsupportedWheel)
|
||||
from pip.backwardcompat import urlparse, url2pathname
|
||||
from pip.download import PipSession, url_to_path, path_to_url
|
||||
from pip.wheel import Wheel, wheel_ext
|
||||
from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform
|
||||
import html5lib, requests, pkg_resources
|
||||
from requests.exceptions import SSLError
|
||||
|
||||
|
||||
__all__ = ['PackageFinder']
|
||||
|
||||
|
||||
DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org"
|
||||
|
||||
INSECURE_SCHEMES = {
|
||||
"http": ["https"],
|
||||
}
|
||||
|
||||
|
||||
class PackageFinder(object):
|
||||
"""This finds packages.
|
||||
|
||||
This is meant to match easy_install's technique for looking for
|
||||
packages, by reading pages and looking for appropriate links
|
||||
"""
|
||||
|
||||
def __init__(self, find_links, index_urls,
|
||||
use_wheel=True, allow_external=[], allow_unverified=[],
|
||||
allow_all_external=False, allow_all_prereleases=False,
|
||||
process_dependency_links=False, session=None):
|
||||
self.find_links = find_links
|
||||
self.index_urls = index_urls
|
||||
self.dependency_links = []
|
||||
self.cache = PageCache()
|
||||
# These are boring links that have already been logged somehow:
|
||||
self.logged_links = set()
|
||||
|
||||
self.use_wheel = use_wheel
|
||||
|
||||
# Do we allow (safe and verifiable) externally hosted files?
|
||||
self.allow_external = set(normalize_name(n) for n in allow_external)
|
||||
|
||||
# Which names are allowed to install insecure and unverifiable files?
|
||||
self.allow_unverified = set(
|
||||
normalize_name(n) for n in allow_unverified
|
||||
)
|
||||
|
||||
# Anything that is allowed unverified is also allowed external
|
||||
self.allow_external |= self.allow_unverified
|
||||
|
||||
# Do we allow all (safe and verifiable) externally hosted files?
|
||||
self.allow_all_external = allow_all_external
|
||||
|
||||
# Stores if we ignored any external links so that we can instruct
|
||||
# end users how to install them if no distributions are available
|
||||
self.need_warn_external = False
|
||||
|
||||
# Stores if we ignored any unsafe links so that we can instruct
|
||||
# end users how to install them if no distributions are available
|
||||
self.need_warn_unverified = False
|
||||
|
||||
# Do we want to allow _all_ pre-releases?
|
||||
self.allow_all_prereleases = allow_all_prereleases
|
||||
|
||||
# Do we process dependency links?
|
||||
self.process_dependency_links = process_dependency_links
|
||||
self._have_warned_dependency_links = False
|
||||
|
||||
# The Session we'll use to make requests
|
||||
self.session = session or PipSession()
|
||||
|
||||
def add_dependency_links(self, links):
|
||||
## FIXME: this shouldn't be global list this, it should only
|
||||
## apply to requirements of the package that specifies the
|
||||
## dependency_links value
|
||||
## FIXME: also, we should track comes_from (i.e., use Link)
|
||||
if self.process_dependency_links:
|
||||
if not self._have_warned_dependency_links:
|
||||
logger.deprecated(
|
||||
"1.6",
|
||||
"Dependency Links processing has been deprecated with an "
|
||||
"accelerated time schedule and will be removed in pip 1.6",
|
||||
)
|
||||
self._have_warned_dependency_links = True
|
||||
self.dependency_links.extend(links)
|
||||
|
||||

    def _sort_locations(self, locations):
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files, urls).
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')
            is_find_link = url in self.find_links

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if is_find_link and os.path.isdir(path):
                    path = os.path.realpath(path)
                    for item in os.listdir(path):
                        sort_path(os.path.join(path, item))
                elif is_file_url and os.path.isdir(path):
                    urls.append(url)
                elif os.path.isfile(path):
                    sort_path(path)
            else:
                urls.append(url)

        return files, urls

    def _link_sort_key(self, link_tuple):
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min()
          3. source archives
        Note: it was considered to embed this logic into the Link
        comparison operators, but then different sdist links
        with the same version would have to be considered equal.
        """
        parsed_version, link, _ = link_tuple
        if self.use_wheel:
            support_num = len(supported_tags)
            if link == INSTALLED_VERSION:
                pri = 1
            elif link.ext == wheel_ext:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                if not wheel.supported():
                    raise UnsupportedWheel(
                        "%s is not a supported wheel for this platform. It "
                        "can't be sorted." % wheel.filename)
                pri = -(wheel.support_index_min())
            else:  # sdist
                pri = -(support_num)
            return (parsed_version, pri)
        else:
            return parsed_version

    def _sort_versions(self, applicable_versions):
        """
        Bring the latest version (and wheels) to the front, but maintain the
        existing ordering as secondary.  See the docstring for
        `_link_sort_key` for details.  This function is isolated for easier
        unit testing.
        """
        return sorted(applicable_versions, key=self._link_sort_key,
                      reverse=True)

    def find_requirement(self, req, upgrade):

        def mkurl_pypi_url(url):
            loc = posixpath.join(url, url_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        url_name = req.url_name
        # Only check main index if index URL is given:
        main_index_url = None
        if self.index_urls:
            # Check that we have the url_name correctly spelled:
            main_index_url = Link(mkurl_pypi_url(self.index_urls[0]),
                                  trusted=True)
            # This will also cache the page, so it's okay that we get it
            # again later:
            page = self._get_page(main_index_url, req)
            if page is None:
                url_name = self._find_url_name(
                    Link(self.index_urls[0], trusted=True),
                    url_name, req) or req.url_name

        if url_name is not None:
            locations = [
                mkurl_pypi_url(url)
                for url in self.index_urls] + self.find_links
        else:
            locations = list(self.find_links)
        for version in req.absolute_versions:
            if url_name is not None and main_index_url is not None:
                locations = [
                    posixpath.join(main_index_url.url, version)] + locations

        file_locations, url_locations = self._sort_locations(locations)
        _flocations, _ulocations = self._sort_locations(self.dependency_links)
        file_locations.extend(_flocations)

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links
        locations = [Link(url, trusted=True) for url in url_locations]

        # We explicitly do not trust links that came from dependency_links
        locations.extend([Link(url) for url in _ulocations])

        logger.debug('URLs to search for versions for %s:' % req)
        for location in locations:
            logger.debug('* %s' % location)

            # Determine if this url used a secure transport mechanism
            parsed = urlparse.urlparse(str(location))
            if parsed.scheme in INSECURE_SCHEMES:
                secure_schemes = INSECURE_SCHEMES[parsed.scheme]

                if len(secure_schemes) == 1:
                    ctx = (location, parsed.scheme, secure_schemes[0],
                           parsed.netloc)
                    logger.warn("%s uses an insecure transport scheme (%s). "
                                "Consider using %s if %s has it available" %
                                ctx)
                elif len(secure_schemes) > 1:
                    ctx = (location, parsed.scheme,
                           ", ".join(secure_schemes), parsed.netloc)
                    logger.warn("%s uses an insecure transport scheme (%s). "
                                "Consider using one of %s if %s has any of "
                                "them available" % ctx)
                else:
                    ctx = (location, parsed.scheme)
                    logger.warn("%s uses an insecure transport scheme (%s)." %
                                ctx)

        found_versions = []
        found_versions.extend(
            self._package_versions(
                # We trust every directly linked archive in find_links
                [Link(url, '-f', trusted=True) for url in self.find_links],
                req.name.lower()))
        page_versions = []
        for page in self._get_pages(locations, req):
            logger.debug('Analyzing links from page %s' % page.url)
            logger.indent += 2
            try:
                page_versions.extend(
                    self._package_versions(page.links, req.name.lower()))
            finally:
                logger.indent -= 2
        dependency_versions = list(self._package_versions(
            [Link(url) for url in self.dependency_links], req.name.lower()))
        if dependency_versions:
            logger.info('dependency_links found: %s'
                        % ', '.join([link.url for parsed, link, version
                                     in dependency_versions]))
        file_versions = list(self._package_versions(
            [Link(url) for url in file_locations], req.name.lower()))
        if (not found_versions and not page_versions
                and not dependency_versions and not file_versions):
            logger.fatal('Could not find any downloads that satisfy the '
                         'requirement %s' % req)

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external %s to allow)." % req.name)

            if self.need_warn_unverified:
                logger.warn("Some insecure and unverifiable files were "
                            "ignored (use --allow-unverified %s to allow)." %
                            req.name)

            raise DistributionNotFound('No distributions at all found for %s'
                                       % req)
        installed_version = []
        if req.satisfied_by is not None:
            installed_version = [(req.satisfied_by.parsed_version,
                                  INSTALLED_VERSION,
                                  req.satisfied_by.version)]
        if file_versions:
            file_versions.sort(reverse=True)
            logger.info('Local files found: %s'
                        % ', '.join([url_to_path(link.url)
                                     for parsed, link, version
                                     in file_versions]))
        # This is an intentional priority ordering
        all_versions = (installed_version + file_versions + found_versions
                        + page_versions + dependency_versions)
        applicable_versions = []
        for (parsed_version, link, version) in all_versions:
            if version not in req.req:
                logger.info("Ignoring link %s, version %s doesn't match %s"
                            % (link, version,
                               ','.join([''.join(s) for s in req.req.specs])))
                continue
            elif (is_prerelease(version)
                    and not (self.allow_all_prereleases or req.prereleases)):
                # If this version isn't the already installed one, then
                # ignore it if it's a pre-release.
                if link is not INSTALLED_VERSION:
                    logger.info("Ignoring link %s, version %s is a "
                                "pre-release (use --pre to allow)."
                                % (link, version))
                    continue
            applicable_versions.append((parsed_version, link, version))
        applicable_versions = self._sort_versions(applicable_versions)
        existing_applicable = bool([link for parsed_version, link, version
                                    in applicable_versions
                                    if link is INSTALLED_VERSION])
        if not upgrade and existing_applicable:
            if applicable_versions[0][1] is INSTALLED_VERSION:
                logger.info('Existing installed version (%s) is most '
                            'up-to-date and satisfies requirement'
                            % req.satisfied_by.version)
            else:
                logger.info('Existing installed version (%s) satisfies '
                            'requirement (most up-to-date version is %s)'
                            % (req.satisfied_by.version,
                               applicable_versions[0][2]))
            return None
        if not applicable_versions:
            logger.fatal('Could not find a version that satisfies the '
                         'requirement %s (from versions: %s)'
                         % (req, ', '.join([version for parsed_version, link,
                                            version in all_versions])))

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external to allow).")

            if self.need_warn_unverified:
                logger.warn("Some insecure and unverifiable files were "
                            "ignored (use --allow-unverified %s to allow)." %
                            req.name)

            raise DistributionNotFound('No distributions matching the '
                                       'version for %s' % req)
        if applicable_versions[0][1] is INSTALLED_VERSION:
            # We have an existing version, and it's the best version
            logger.info('Installed version (%s) is most up-to-date (past '
                        'versions: %s)'
                        % (req.satisfied_by.version,
                           ', '.join([version for parsed_version, link,
                                      version in applicable_versions[1:]])
                           or 'none'))
            raise BestVersionAlreadyInstalled
        if len(applicable_versions) > 1:
            logger.info('Using version %s (newest of versions: %s)' %
                        (applicable_versions[0][2],
                         ', '.join([version for parsed_version, link, version
                                    in applicable_versions])))

        selected_version = applicable_versions[0][1]

        if (selected_version.internal is not None
                and not selected_version.internal):
            logger.warn("%s is an externally hosted file and may be "
                        "unreliable" % req.name)

        if (selected_version.verifiable is not None
                and not selected_version.verifiable):
            logger.warn("%s is potentially insecure and "
                        "unverifiable." % req.name)

        if selected_version._deprecated_regex:
            logger.deprecated(
                "1.7",
                "%s discovered using a deprecated method of parsing, "
                "in the future it will no longer be discovered" % req.name
            )

        return selected_version
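
    # Editor's note: a minimal usage sketch (not part of upstream pip; the
    # index URL and InstallRequirement.from_line reflect the pip 1.5-era
    # API, and the requirement name is illustrative):
    #
    #   from pip.req import InstallRequirement
    #   finder = PackageFinder(find_links=[],
    #                          index_urls=['https://pypi.python.org/simple/'])
    #   link = finder.find_requirement(
    #       InstallRequirement.from_line('requests'), upgrade=False)
    #   print(link.url)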

    def _find_url_name(self, index_url, url_name, req):
        """Finds the true URL name of a package, when the given name isn't
        quite correct.
        This is usually used to implement case-insensitivity."""
        if not index_url.url.endswith('/'):
            # Vaguely part of the PyPI API... weird but true.
            ## FIXME: bad to modify this?
            index_url.url += '/'
        page = self._get_page(index_url, req)
        if page is None:
            logger.fatal('Cannot fetch index base URL %s' % index_url)
            return
        norm_name = normalize_name(req.url_name)
        for link in page.links:
            base = posixpath.basename(link.path.rstrip('/'))
            if norm_name == normalize_name(base):
                logger.notify('Real name of requirement %s is %s'
                              % (url_name, base))
                return base
        return None

    def _get_pages(self, locations, req):
        """
        Yields (page, page_url) from the given locations, skipping
        locations that have errors, and adding download/homepage links.
        """
        all_locations = list(locations)
        seen = set()

        while all_locations:
            location = all_locations.pop(0)
            if location in seen:
                continue
            seen.add(location)

            page = self._get_page(location, req)
            if page is None:
                continue

            yield page

            for link in page.rel_links():
                normalized = normalize_name(req.name).lower()

                if (normalized not in self.allow_external
                        and not self.allow_all_external):
                    self.need_warn_external = True
                    logger.debug("Not searching %s for files because "
                                 "external urls are disallowed." % link)
                    continue

                if (link.trusted is not None
                        and not link.trusted
                        and normalized not in self.allow_unverified):
                    logger.debug("Not searching %s for urls, it is an "
                                 "untrusted link and cannot produce safe or "
                                 "verifiable files." % link)
                    self.need_warn_unverified = True
                    continue

                all_locations.append(link)

    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    def _sort_links(self, links):
        """Returns elements of links in order, non-egg links first, egg
        links second, while eliminating duplicates"""
        eggs, no_eggs = [], []
        seen = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(self, links, search_name):
        for link in self._sort_links(links):
            for v in self._link_package_versions(link, search_name):
                yield v

    def _known_extensions(self):
        extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip')
        if self.use_wheel:
            return extensions + (wheel_ext,)
        return extensions

    def _link_package_versions(self, link, search_name):
        """
        Return an iterable of triples (pkg_resources_version_key,
        link, python_version) that can be extracted from the given
        link.

        Meant to be overridden by subclasses, not called by clients.
        """
        platform = get_platform()

        version = None
        if link.egg_fragment:
            egg_info = link.egg_fragment
        else:
            egg_info, ext = link.splitext()
            if not ext:
                if link not in self.logged_links:
                    logger.debug('Skipping link %s; not a file' % link)
                    self.logged_links.add(link)
                return []
            if egg_info.endswith('.tar'):
                # Special double-extension case:
                egg_info = egg_info[:-4]
                ext = '.tar' + ext
            if ext not in self._known_extensions():
                if link not in self.logged_links:
                    logger.debug('Skipping link %s; unknown archive '
                                 'format: %s' % (link, ext))
                    self.logged_links.add(link)
                return []
            if "macosx10" in link.path and ext == '.zip':
                if link not in self.logged_links:
                    logger.debug('Skipping link %s; macosx10 one' % (link))
                    self.logged_links.add(link)
                return []
            if ext == wheel_ext:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    logger.debug('Skipping %s because the wheel filename is '
                                 'invalid' % link)
                    return []
                if wheel.name.lower() != search_name.lower():
                    logger.debug('Skipping link %s; wrong project name '
                                 '(not %s)' % (link, search_name))
                    return []
                if not wheel.supported():
                    logger.debug('Skipping %s because it is not compatible '
                                 'with this Python' % link)
                    return []
                # This is a dirty hack to prevent installing Binary Wheels
                # from PyPI unless it is a Windows or Mac Binary Wheel. This
                # is paired with a change to PyPI disabling uploads for the
                # same. Once we have a mechanism for enabling support for
                # binary wheels on linux that deals with the inherent
                # problems of binary distribution this can be removed.
                comes_from = getattr(link, "comes_from", None)
                if ((
                        not platform.startswith('win')
                        and not platform.startswith('macosx')
                        )
                        and comes_from is not None
                        and urlparse.urlparse(
                            comes_from.url).netloc.endswith(
                                "pypi.python.org")):
                    if not wheel.supported(tags=supported_tags_noarch):
                        logger.debug(
                            "Skipping %s because it is a pypi-hosted binary "
                            "Wheel on an unsupported platform" % link
                        )
                        return []
                version = wheel.version

        if not version:
            version = self._egg_info_matches(egg_info, search_name, link)
        if version is None:
            logger.debug('Skipping link %s; wrong project name (not %s)'
                         % (link, search_name))
            return []

        if (link.internal is not None
                and not link.internal
                and normalize_name(search_name).lower()
                    not in self.allow_external
                and not self.allow_all_external):
            # We have a link that we are sure is external, so we should skip
            # it unless we are allowing externals
            logger.debug("Skipping %s because it is externally hosted."
                         % link)
            self.need_warn_external = True
            return []

        if (link.verifiable is not None
                and not link.verifiable
                and (normalize_name(search_name).lower()
                     not in self.allow_unverified)):
            # We have a link whose integrity we are sure we cannot verify,
            # so we should skip it unless we are allowing unsafe installs
            # for this requirement.
            logger.debug("Skipping %s because it is an insecure and "
                         "unverifiable file." % link)
            self.need_warn_unverified = True
            return []

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                logger.debug('Skipping %s because Python version is '
                             'incorrect' % link)
                return []
        logger.debug('Found link %s, version: %s' % (link, version))
        return [(pkg_resources.parse_version(version),
                 link,
                 version)]

    def _egg_info_matches(self, egg_info, search_name, link):
        match = self._egg_info_re.search(egg_info)
        if not match:
            logger.debug('Could not parse version from link: %s' % link)
            return None
        name = match.group(0).lower()
        # To match the "safe" name that pkg_resources creates:
        name = name.replace('_', '-')
        # project name and version must be separated by a dash
        look_for = search_name.lower() + "-"
        if name.startswith(look_for):
            return match.group(0)[len(look_for):]
        else:
            return None
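
    # Editor's note: a standalone sketch (not part of upstream pip) of the
    # name/version split performed by _egg_info_matches() above:
    #
    #   import re
    #   egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
    #   m = egg_info_re.search('pip-1.5.6')
    #   assert m.group(0) == 'pip-1.5.6'
    #   assert m.group(0)[len('pip-'):] == '1.5.6'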

    def _get_page(self, link, req):
        return HTMLPage.get_page(link, req,
                                 cache=self.cache,
                                 session=self.session,
                                 )


class PageCache(object):
    """Cache of HTML pages"""

    failure_limit = 3

    def __init__(self):
        self._failures = {}
        self._pages = {}
        self._archives = {}

    def too_many_failures(self, url):
        return self._failures.get(url, 0) >= self.failure_limit

    def get_page(self, url):
        return self._pages.get(url)

    def is_archive(self, url):
        return self._archives.get(url, False)

    def set_is_archive(self, url, value=True):
        self._archives[url] = value

    def add_page_failure(self, url, level):
        self._failures[url] = self._failures.get(url, 0) + level

    def add_page(self, urls, page):
        for url in urls:
            self._pages[url] = page
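

# Editor's note: a small standalone sketch (not part of upstream pip; the URL
# is illustrative) of the failure counting above: once a URL accumulates
# failure_limit points, too_many_failures() returns True and the page is
# skipped on later lookups.
def _example_page_cache():
    cache = PageCache()
    cache.add_page_failure('http://example.com/simple/', level=2)
    assert not cache.too_many_failures('http://example.com/simple/')
    cache.add_page_failure('http://example.com/simple/', level=1)
    assert cache.too_many_failures('http://example.com/simple/')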


class HTMLPage(object):
    """Represents one page, along with its URL"""

    ## FIXME: these regexes are horrible hacks:
    _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
    _download_re = re.compile(r'<th>\s*download\s+url', re.I)
    _href_re = re.compile(
        'href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I | re.S)

    def __init__(self, content, url, headers=None, trusted=None):
        self.content = content
        self.parsed = html5lib.parse(self.content,
                                     namespaceHTMLElements=False)
        self.url = url
        self.headers = headers
        self.trusted = trusted

    def __str__(self):
        return self.url

    @classmethod
    def get_page(cls, link, req, cache=None, skip_archives=True,
                 session=None):
        if session is None:
            session = PipSession()

        url = link.url
        url = url.split('#', 1)[0]
        if cache is not None and cache.too_many_failures(url):
            return None

        # Check for VCS schemes that do not support lookup as web pages.
        from pip.vcs import VcsSupport
        for scheme in VcsSupport.schemes:
            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
                logger.debug('Cannot look at %(scheme)s URL %(link)s'
                             % locals())
                return None

        if cache is not None:
            inst = cache.get_page(url)
            if inst is not None:
                return inst
        try:
            if skip_archives:
                if cache is not None:
                    if cache.is_archive(url):
                        return None
                filename = link.filename
                for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz',
                                '.zip']:
                    if filename.endswith(bad_ext):
                        content_type = cls._get_content_type(url,
                                                             session=session,
                                                             )
                        if content_type.lower().startswith('text/html'):
                            break
                        else:
                            logger.debug('Skipping page %s because of '
                                         'Content-Type: %s'
                                         % (link, content_type))
                            if cache is not None:
                                cache.set_is_archive(url)
                            return None
            logger.debug('Getting page %s' % url)

            # Tack index.html onto file:// URLs that point to directories
            (scheme, netloc, path, params, query, fragment) = \
                urlparse.urlparse(url)
            if scheme == 'file' and os.path.isdir(url2pathname(path)):
                # add trailing slash if not present so urljoin doesn't trim
                # the final segment
                if not url.endswith('/'):
                    url += '/'
                url = urlparse.urljoin(url, 'index.html')
                logger.debug(' file: URL is directory, getting %s' % url)

            resp = session.get(url, headers={"Accept": "text/html"})
            resp.raise_for_status()

            # The check for archives above only works if the url ends with
            # something that looks like an archive. However that is not a
            # requirement. For instance
            # http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download
            # redirects to
            # http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz
            # Unless we issue a HEAD request on every url we cannot know
            # ahead of time for sure if something is HTML or not. However we
            # can check after we've downloaded it.
            content_type = resp.headers.get('Content-Type', 'unknown')
            if not content_type.lower().startswith("text/html"):
                logger.debug('Skipping page %s because of Content-Type: %s' %
                             (link, content_type))
                if cache is not None:
                    cache.set_is_archive(url)
                return None

            inst = cls(resp.text, resp.url, resp.headers,
                       trusted=link.trusted)
        except requests.HTTPError as exc:
            level = 2 if exc.response.status_code == 404 else 1
            cls._handle_fail(req, link, exc, url, cache=cache, level=level)
        except requests.ConnectionError as exc:
            cls._handle_fail(
                req, link, "connection error: %s" % exc, url,
                cache=cache,
            )
        except requests.Timeout:
            cls._handle_fail(req, link, "timed out", url, cache=cache)
        except SSLError as exc:
            reason = ("There was a problem confirming the ssl certificate: "
                      "%s" % exc)
            cls._handle_fail(req, link, reason, url,
                             cache=cache,
                             level=2,
                             meth=logger.notify,
                             )
        else:
            if cache is not None:
                cache.add_page([url, resp.url], inst)
            return inst

    @staticmethod
    def _handle_fail(req, link, reason, url, cache=None, level=1, meth=None):
        if meth is None:
            meth = logger.info

        meth("Could not fetch URL %s: %s", link, reason)
        meth("Will skip URL %s when looking for download links for %s" %
             (link.url, req))

        if cache is not None:
            cache.add_page_failure(url, level)

    @staticmethod
    def _get_content_type(url, session=None):
        """Get the Content-Type of the given url, using a HEAD request"""
        if session is None:
            session = PipSession()

        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
        if scheme not in ('http', 'https', 'ftp', 'ftps'):
            ## FIXME: some warning or something?
            ## assertion error?
            return ''

        resp = session.head(url, allow_redirects=True)
        resp.raise_for_status()

        return resp.headers.get("Content-Type", "")

    @property
    def api_version(self):
        if not hasattr(self, "_api_version"):
            _api_version = None

            metas = [x for x in self.parsed.findall(".//meta")
                     if x.get("name", "").lower() == "api-version"]
            if metas:
                try:
                    _api_version = int(metas[0].get("value", None))
                except (TypeError, ValueError):
                    _api_version = None
            self._api_version = _api_version
        return self._api_version

    @property
    def base_url(self):
        if not hasattr(self, "_base_url"):
            base = self.parsed.find(".//base")
            if base is not None and base.get("href"):
                self._base_url = base.get("href")
            else:
                self._base_url = self.url
        return self._base_url

    @property
    def links(self):
        """Yields all links in the page"""
        for anchor in self.parsed.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                url = self.clean_link(urlparse.urljoin(self.base_url, href))

                # Determine if this link is internal. If that distinction
                # doesn't make sense in this context, then we don't make
                # any distinction.
                internal = None
                if self.api_version and self.api_version >= 2:
                    # Only api_versions >= 2 have a distinction between
                    # external and internal links
                    internal = bool(
                        anchor.get("rel")
                        and "internal" in anchor.get("rel").split())

                yield Link(url, self, internal=internal)

    def rel_links(self):
        for url in self.explicit_rel_links():
            yield url
        for url in self.scraped_rel_links():
            yield url

    def explicit_rel_links(self, rels=('homepage', 'download')):
        """Yields all links with the given relations"""
        rels = set(rels)

        for anchor in self.parsed.findall(".//a"):
            if anchor.get("rel") and anchor.get("href"):
                found_rels = set(anchor.get("rel").split())
                # Determine the intersection between what rels were found
                # and what rels were being looked for
                if found_rels & rels:
                    href = anchor.get("href")
                    url = self.clean_link(
                        urlparse.urljoin(self.base_url, href))
                    yield Link(url, self, trusted=False)

    def scraped_rel_links(self):
        # Can we get rid of this horrible horrible method?
        for regex in (self._homepage_re, self._download_re):
            match = regex.search(self.content)
            if not match:
                continue
            href_match = self._href_re.search(self.content, pos=match.end())
            if not href_match:
                continue
            url = (href_match.group(1) or href_match.group(2)
                   or href_match.group(3))
            if not url:
                continue
            url = self.clean_link(urlparse.urljoin(self.base_url, url))
            yield Link(url, self, trusted=False, _deprecated_regex=True)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(self, url):
        """Makes sure a link is fully encoded.  That is, if a ' ' shows up
        in the link, it will be rewritten to %20 (while not over-quoting
        % or other characters)."""
        return self._clean_re.sub(
            lambda match: '%%%2x' % ord(match.group(0)), url)
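

# Editor's note: an illustrative check (not part of upstream pip; the URL is
# made up) of clean_link()'s escaping: a space (0x20) becomes %20, while
# already-allowed characters such as '%' and '-' are left alone.
def _example_clean_link():
    page = HTMLPage('<html></html>', 'http://example.com/')
    return page.clean_link('http://example.com/My Package-1.0.tar.gz')
    # -> 'http://example.com/My%20Package-1.0.tar.gz'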


class Link(object):

    def __init__(self, url, comes_from=None, internal=None, trusted=None,
                 _deprecated_regex=False):
        self.url = url
        self.comes_from = comes_from
        self.internal = internal
        self.trusted = trusted
        self._deprecated_regex = _deprecated_regex

    def __str__(self):
        if self.comes_from:
            return '%s (from %s)' % (self.url, self.comes_from)
        else:
            return str(self.url)

    def __repr__(self):
        return '<Link %s>' % self

    def __eq__(self, other):
        return self.url == other.url

    def __ne__(self, other):
        return self.url != other.url

    def __lt__(self, other):
        return self.url < other.url

    def __le__(self, other):
        return self.url <= other.url

    def __gt__(self, other):
        return self.url > other.url

    def __ge__(self, other):
        return self.url >= other.url

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        _, netloc, path, _, _ = urlparse.urlsplit(self.url)
        name = posixpath.basename(path.rstrip('/')) or netloc
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        return urlparse.urlsplit(self.url)[0]

    @property
    def path(self):
        return urlparse.urlsplit(self.url)[2]

    def splitext(self):
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
        return urlparse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'#egg=([^&]*)')

    @property
    def egg_fragment(self):
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')

    @property
    def hash(self):
        match = self._hash_re.search(self.url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        match = self._hash_re.search(self.url)
        if match:
            return match.group(1)
        return None
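
    # Editor's note: an illustrative fragment (not part of upstream pip; the
    # URL is made up) showing how hash metadata rides in the URL fragment:
    #
    #   link = Link('https://example.com/foo-1.0.tar.gz'
    #               '#md5=5d41402abc4b2a76b9719d911017c592')
    #   link.hash_name  # -> 'md5'
    #   link.hash       # -> '5d41402abc4b2a76b9719d911017c592'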

    @property
    def show_url(self):
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])

    @property
    def verifiable(self):
        """
        Returns True if this link can be verified after download, False if
        it cannot, and None if we cannot determine.
        """
        trusted = self.trusted or getattr(self.comes_from, "trusted", None)
        if trusted is not None and trusted:
            # This link came from a trusted source. It *may* be verifiable
            # but first we need to see if this page is operating under the
            # new API version.
            try:
                api_version = getattr(self.comes_from, "api_version", None)
                api_version = int(api_version)
            except (ValueError, TypeError):
                api_version = None

            if api_version is None or api_version <= 1:
                # This link is either trusted, or it came from a trusted
                # source, but it is not operating under API version 2, so
                # we can't make any claims about whether it's safe or not.
                return

            if self.hash:
                # This link came from a trusted source and it has a hash,
                # so we can consider it safe.
                return True
            else:
                # This link came from a trusted source, using the new API
                # version, and it does not have a hash. It is NOT verifiable.
                return False
        elif trusted is not None:
            # This link came from an untrusted source and we cannot trust it
            return False


# An object to represent the "link" for the installed version of a
# requirement.  Using Inf as the url makes it sort higher.
INSTALLED_VERSION = Link(Inf)


def get_requirement_from_url(url):
    """Get a requirement from the URL, if possible.  This looks for #egg
    in the URL"""
    link = Link(url)
    egg_info = link.egg_fragment
    if not egg_info:
        egg_info = splitext(link.filename)[0]
    return package_to_requirement(egg_info)


def package_to_requirement(package_name):
    """Translate a name like Foo-1.2 to Foo==1.2"""
    match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
    if match:
        name = match.group(1)
        version = match.group(2)
    else:
        name = package_name
        version = ''
    if version:
        return '%s==%s' % (name, version)
    else:
        return name
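

# Editor's note: illustrative inputs/outputs (not part of upstream pip; the
# URL and names are made up) for the two helpers above:
#
#   get_requirement_from_url('http://example.com/Foo-1.2.tar.gz#egg=Foo-1.2')
#   # -> 'Foo==1.2'  (the #egg fragment wins over the filename)
#   package_to_requirement('Foo-dev')   # -> 'Foo==dev'
#   package_to_requirement('Foo')       # -> 'Foo'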

172
lib/python3.4/site-packages/pip/locations.py
Normal file
@ -0,0 +1,172 @@

"""Locations where we look for configs, install stuff, etc"""
|
||||
|
||||
import sys
|
||||
import site
|
||||
import os
|
||||
import tempfile
|
||||
from distutils.command.install import install, SCHEME_KEYS
|
||||
import getpass
|
||||
from pip.backwardcompat import get_python_lib, get_path_uid, user_site
|
||||
import pip.exceptions
|
||||
|
||||
|
||||
DELETE_MARKER_MESSAGE = '''\
|
||||
This file is placed here by pip to indicate the source was put
|
||||
here by pip.
|
||||
|
||||
Once this package is successfully installed this source code will be
|
||||
deleted (unless you remove this file).
|
||||
'''
|
||||
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|
||||
|
||||
def write_delete_marker_file(directory):
|
||||
"""
|
||||
Write the pip delete marker file into this directory.
|
||||
"""
|
||||
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
|
||||
marker_fp = open(filepath, 'w')
|
||||
marker_fp.write(DELETE_MARKER_MESSAGE)
|
||||
marker_fp.close()
|
||||
|
||||
|
||||
def running_under_virtualenv():
|
||||
"""
|
||||
Return True if we're running inside a virtualenv, False otherwise.
|
||||
|
||||
"""
|
||||
if hasattr(sys, 'real_prefix'):
|
||||
return True
|
||||
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def virtualenv_no_global():
|
||||
"""
|
||||
Return True if in a venv and no system site packages.
|
||||
"""
|
||||
#this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file
|
||||
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
|
||||
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
|
||||
if running_under_virtualenv() and os.path.isfile(no_global_file):
|
||||
return True
|
||||
|
||||
def __get_username():
|
||||
""" Returns the effective username of the current process. """
|
||||
if sys.platform == 'win32':
|
||||
return getpass.getuser()
|
||||
import pwd
|
||||
return pwd.getpwuid(os.geteuid()).pw_name
|
||||
|
||||
def _get_build_prefix():
|
||||
""" Returns a safe build_prefix """
|
||||
path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' %
|
||||
__get_username())
|
||||
if sys.platform == 'win32':
|
||||
""" on windows(tested on 7) temp dirs are isolated """
|
||||
return path
|
||||
try:
|
||||
os.mkdir(path)
|
||||
write_delete_marker_file(path)
|
||||
except OSError:
|
||||
file_uid = None
|
||||
try:
|
||||
# raises OSError for symlinks
|
||||
# https://github.com/pypa/pip/pull/935#discussion_r5307003
|
||||
file_uid = get_path_uid(path)
|
||||
except OSError:
|
||||
file_uid = None
|
||||
|
||||
if file_uid != os.geteuid():
|
||||
msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \
|
||||
% path
|
||||
print (msg)
|
||||
print("pip will not work until the temporary folder is " + \
|
||||
"either deleted or is a real directory owned by your user account.")
|
||||
raise pip.exceptions.InstallationError(msg)
|
||||
return path
|
||||
|
||||
if running_under_virtualenv():
    build_prefix = os.path.join(sys.prefix, 'build')
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # Note: intentionally NOT using mkdtemp
    # See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp
    build_prefix = _get_build_prefix()

    ## FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit("The folder you are executing pip from can no longer be found.")

# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
build_prefix = os.path.abspath(os.path.realpath(build_prefix))
src_prefix = os.path.abspath(src_prefix)

# FIXME: doesn't account for venv linked to global site-packages

site_packages = get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts') if user_site else None
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, 'pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.ini')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, '.pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.conf')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')

# Forcing to use /usr/local/bin for standard Mac OS X framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
    bin_py = '/usr/local/bin'
    default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')

def distutils_scheme(dist_name, user=False, home=None, root=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}
    d = Distribution({'name': dist_name})
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options(); ideally, we'd
    # prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if running_under_virtualenv():
        scheme['headers'] = os.path.join(sys.prefix,
                                         'include',
                                         'site',
                                         'python' + sys.version[:3],
                                         dist_name)

    if root is not None:
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
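
# Illustrative sketch (not part of upstream pip): distutils_scheme() maps a
# distribution name to its install locations; the keys mirror SCHEME_KEYS,
# assumed here to be ('platlib', 'purelib', 'headers', 'scripts', 'data').
#
#     >>> scheme = distutils_scheme('example-dist')
#     >>> sorted(scheme)
#     ['data', 'headers', 'platlib', 'purelib', 'scripts']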
276
lib/python3.4/site-packages/pip/log.py
Normal file
@@ -0,0 +1,276 @@
"""Logging
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
|
||||
from pip import backwardcompat
|
||||
import colorama, pkg_resources
|
||||
|
||||
|
||||
def _color_wrap(*colors):
    def wrapped(inp):
        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
    return wrapped


def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
    real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32)
                     else consumer.wrapped)

    # If consumer isn't stdout or stderr we shouldn't colorize it
    if real_consumer not in std:
        return False

    # If consumer is a tty we should color it
    if hasattr(real_consumer, "isatty") and real_consumer.isatty():
        return True

    # If we have an ANSI term we should color it
    if environ.get("TERM") == "ANSI":
        return True

    # If anything else we should not color it
    return False

def should_warn(current_version, removal_version):
    # Our significant digits on versions is 2, so remove everything but the
    # first two places.
    current_version = ".".join(current_version.split(".")[:2])
    removal_version = ".".join(removal_version.split(".")[:2])

    # Our warning threshold is one minor version before removal, so we
    # decrement the minor version by one
    major, minor = removal_version.split(".")
    minor = str(int(minor) - 1)
    warn_version = ".".join([major, minor])

    # Test if our current_version should be a warn
    return (pkg_resources.parse_version(current_version)
            < pkg_resources.parse_version(warn_version))

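
# Illustrative sketch (not part of upstream pip): with a removal version of
# 1.7 the warn threshold is 1.6, so only versions older than 1.6 still warn.
#
#     >>> should_warn("1.5", "1.7")
#     True
#     >>> should_warn("1.6", "1.7")
#     False
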
class Logger(object):
    """
    Logging object for use in command-line script.  Allows ranges of
    levels, to avoid some redundancy of displayed information.
    """
    VERBOSE_DEBUG = logging.DEBUG - 1
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    NOTIFY = (logging.INFO + logging.WARN) / 2
    WARN = WARNING = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL

    LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]

    COLORS = {
        WARN: _color_wrap(colorama.Fore.YELLOW),
        ERROR: _color_wrap(colorama.Fore.RED),
        FATAL: _color_wrap(colorama.Fore.RED),
    }

    def __init__(self):
        self.consumers = []
        self.indent = 0
        self.explicit_levels = False
        self.in_progress = None
        self.in_progress_hanging = False

    def add_consumers(self, *consumers):
        if sys.platform.startswith("win"):
            for level, consumer in consumers:
                if hasattr(consumer, "write"):
                    self.consumers.append(
                        (level, colorama.AnsiToWin32(consumer)),
                    )
                else:
                    self.consumers.append((level, consumer))
        else:
            self.consumers.extend(consumers)

    def debug(self, msg, *args, **kw):
        self.log(self.DEBUG, msg, *args, **kw)

    def info(self, msg, *args, **kw):
        self.log(self.INFO, msg, *args, **kw)

    def notify(self, msg, *args, **kw):
        self.log(self.NOTIFY, msg, *args, **kw)

    def warn(self, msg, *args, **kw):
        self.log(self.WARN, msg, *args, **kw)

    def error(self, msg, *args, **kw):
        self.log(self.ERROR, msg, *args, **kw)

    def fatal(self, msg, *args, **kw):
        self.log(self.FATAL, msg, *args, **kw)

    def deprecated(self, removal_version, msg, *args, **kwargs):
        """
        Logs deprecation message which is log level WARN if the
        ``removal_version`` is > 1 minor release away and log level ERROR
        otherwise.

        removal_version should be the version that the deprecated feature is
        expected to be removed in, so something that will not exist in
        version 1.7, but will in 1.6 would have a removal_version of 1.7.
        """
        from pip import __version__

        if should_warn(__version__, removal_version):
            self.warn(msg, *args, **kwargs)
        else:
            self.error(msg, *args, **kwargs)

    def log(self, level, msg, *args, **kw):
        if args:
            if kw:
                raise TypeError(
                    "You may give positional or keyword arguments, not both")
        args = args or kw

        # render
        if args:
            rendered = msg % args
        else:
            rendered = msg
        rendered = ' ' * self.indent + rendered
        if self.explicit_levels:
            ## FIXME: should this be a name, not a level number?
            rendered = '%02i %s' % (level, rendered)

        for consumer_level, consumer in self.consumers:
            if self.level_matches(level, consumer_level):
                if (self.in_progress_hanging
                        and consumer in (sys.stdout, sys.stderr)):
                    self.in_progress_hanging = False
                    sys.stdout.write('\n')
                    sys.stdout.flush()
                if hasattr(consumer, 'write'):
                    write_content = rendered + '\n'
                    if should_color(consumer, os.environ):
                        # We are printing to stdout or stderr and it supports
                        # colors so render our text colored
                        colorizer = self.COLORS.get(level, lambda x: x)
                        write_content = colorizer(write_content)

                    consumer.write(write_content)
                    if hasattr(consumer, 'flush'):
                        consumer.flush()
                else:
                    consumer(rendered)

    def _show_progress(self):
        """Should we display download progress?"""
        return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())

    def start_progress(self, msg):
        assert not self.in_progress, (
            "Tried to start_progress(%r) while in_progress %r"
            % (msg, self.in_progress))
        if self._show_progress():
            sys.stdout.write(' ' * self.indent + msg)
            sys.stdout.flush()
            self.in_progress_hanging = True
        else:
            self.in_progress_hanging = False
        self.in_progress = msg
        self.last_message = None

    def end_progress(self, msg='done.'):
        assert self.in_progress, (
            "Tried to end_progress without start_progress")
        if self._show_progress():
            if not self.in_progress_hanging:
                # Some message has been printed out since start_progress
                sys.stdout.write('...' + self.in_progress + msg + '\n')
                sys.stdout.flush()
            else:
                # These erase any messages shown with show_progress
                # (besides .'s)
                logger.show_progress('')
                logger.show_progress('')
                sys.stdout.write(msg + '\n')
                sys.stdout.flush()
        self.in_progress = None
        self.in_progress_hanging = False

    def show_progress(self, message=None):
        """If we are in a progress scope, and no log messages have been
        shown, write out another '.'"""
        if self.in_progress_hanging:
            if message is None:
                sys.stdout.write('.')
                sys.stdout.flush()
            else:
                if self.last_message:
                    padding = ' ' * max(0, len(self.last_message) - len(message))
                else:
                    padding = ''
                sys.stdout.write('\r%s%s%s%s' %
                                 (' ' * self.indent, self.in_progress,
                                  message, padding))
                sys.stdout.flush()
                self.last_message = message

    def stdout_level_matches(self, level):
        """Returns true if a message at this level will go to stdout"""
        return self.level_matches(level, self._stdout_level())

    def _stdout_level(self):
        """Returns the level that stdout runs at"""
        for level, consumer in self.consumers:
            if consumer is sys.stdout:
                return level
        return self.FATAL

    def level_matches(self, level, consumer_level):
        """
        >>> l = Logger()
        >>> l.level_matches(3, 4)
        False
        >>> l.level_matches(3, 2)
        True
        >>> l.level_matches(slice(None, 3), 3)
        False
        >>> l.level_matches(slice(None, 3), 2)
        True
        >>> l.level_matches(slice(1, 3), 1)
        True
        >>> l.level_matches(slice(2, 3), 1)
        False
        """
        if isinstance(level, slice):
            start, stop = level.start, level.stop
            if start is not None and start > consumer_level:
                return False
            if stop is not None and stop <= consumer_level:
                return False
            return True
        else:
            return level >= consumer_level

    @classmethod
    def level_for_integer(cls, level):
        levels = cls.LEVELS
        if level < 0:
            return levels[0]
        if level >= len(levels):
            return levels[-1]
        return levels[level]

    def move_stdout_to_stderr(self):
        to_remove = []
        to_add = []
        for consumer_level, consumer in self.consumers:
            if consumer == sys.stdout:
                to_remove.append((consumer_level, consumer))
                to_add.append((consumer_level, sys.stderr))
        for item in to_remove:
            self.consumers.remove(item)
        self.consumers.extend(to_add)


logger = Logger()
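
# Illustrative sketch (not part of upstream pip): wiring the module-level
# logger to stdout, roughly the way pip's command machinery does at startup.
#
#     >>> import sys
#     >>> logger.add_consumers((Logger.NOTIFY, sys.stdout))
#     >>> logger.notify('resolving dependencies...')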
102
lib/python3.4/site-packages/pip/pep425tags.py
Normal file
@@ -0,0 +1,102 @@
"""Generate and work with PEP 425 Compatibility Tags."""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # pragma nocover
|
||||
# Python < 2.7
|
||||
import distutils.sysconfig as sysconfig
|
||||
import distutils.util
|
||||
|
||||
|
||||
def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def get_impl_ver():
    """Return implementation version."""
    return ''.join(map(str, sys.version_info[:2]))


def get_platform():
    """Return our platform name, e.g. 'win32' or 'linux_x86_64'."""
    # XXX remove distutils dependency
    return distutils.util.get_platform().replace('.', '_').replace('-', '_')

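
# Illustrative sketch (not part of upstream pip): on a CPython 3.4
# Linux x86-64 build these helpers would be expected to return:
#
#     >>> get_abbr_impl(), get_impl_ver(), get_platform()
#     ('cp', '34', 'linux_x86_64')
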
def get_supported(versions=None, noarch=False):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        major = sys.version_info[0]
        # Support all previous minor Python versions.
        for minor in range(sys.version_info[1], -1, -1):
            versions.append(''.join(map(str, (major, minor))))

    impl = get_abbr_impl()

    abis = []

    try:
        soabi = sysconfig.get_config_var('SOABI')
    except IOError as e:  # Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        soabi = None

    if soabi and soabi.startswith('cpython-'):
        abis[0:0] = ['cp' + soabi.split('-', 1)[-1]]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = get_platform()

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            supported.append(('%s%s' % (impl, versions[0]), abi, arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported


supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)
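
# Illustrative sketch (not part of upstream pip): on a CPython 3.4 Linux
# build the head of supported_tags would be expected to look roughly like
# ('cp34', 'cp34m', 'linux_x86_64'), followed by progressively more generic
# entries such as ('cp34', 'none', 'any') and ('py34', 'none', 'any').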
1940
lib/python3.4/site-packages/pip/req.py
Normal file
File diff suppressed because it is too large
18
lib/python3.4/site-packages/pip/runner.py
Normal file
@@ -0,0 +1,18 @@
import sys
import os


def run():
    base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    ## FIXME: this is kind of crude; if we could create a fake pip
    ## module, then exec into it and update pip.__path__ properly, we
    ## wouldn't have to update sys.path:
    sys.path.insert(0, base)
    import pip
    return pip.main()


if __name__ == '__main__':
    exit = run()
    if exit:
        sys.exit(exit)
6
lib/python3.4/site-packages/pip/status_codes.py
Normal file
@@ -0,0 +1,6 @@
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
771
lib/python3.4/site-packages/pip/util.py
Normal file
@@ -0,0 +1,771 @@
import sys
import shutil
import os
import stat
import re
import posixpath
import zipfile
import tarfile
import subprocess
import textwrap
import tempfile

from pip.exceptions import InstallationError, BadCommand, PipError
from pip.backwardcompat import (WindowsError, string_types, raw_input,
                                console_to_str, user_site, PermissionError)
from pip.locations import (site_packages, running_under_virtualenv,
                           virtualenv_no_global, distutils_scheme)
from pip.log import logger
import pkg_resources
from distlib import version

__all__ = ['rmtree', 'display_path', 'backup_dir',
           'find_command', 'ask', 'Inf',
           'normalize_name', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'make_path_relative', 'normalize_path',
           'renames', 'get_terminal_size', 'get_prog',
           'unzip_file', 'untar_file', 'create_download_cache_folder',
           'cache_download', 'unpack_file', 'call_subprocess']

def get_prog():
    try:
        if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
    except (AttributeError, TypeError, IndexError):
        pass
    return 'pip'


def rmtree(dir, ignore_errors=False):
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)


def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    exctype, value = exc_info[:2]
    if not ((exctype is WindowsError and value.args[0] == 5) or  # others
            (exctype is OSError and value.args[0] == 13) or  # python2.4
            (exctype is PermissionError and value.args[3] == 5)  # python3.3
            ):
        raise
    # file type should currently be read only
    if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
        raise
    # convert to read/write
    os.chmod(path, stat.S_IWRITE)
    # use the original function to repeat the operation
    func(path)

def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path


def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension

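
# Illustrative sketch (not part of upstream pip): backup_dir() returns the
# first name in the '<dir>.bak', '<dir>.bak2', ... sequence that does not
# exist yet. Assuming no backup exists:
#
#     >>> backup_dir('/tmp/checkout')
#     '/tmp/checkout.bak'
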
def find_command(cmd, paths=None, pathext=None):
    """Searches the PATH for the given command and returns its path"""
    if paths is None:
        paths = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(paths, string_types):
        paths = [paths]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = get_pathext()
    pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)]
    # don't use extensions if the command ends with one of them
    if os.path.splitext(cmd)[1].lower() in pathext:
        pathext = ['']
    # check if we find the command on PATH
    for path in paths:
        # try without extension first
        cmd_path = os.path.join(path, cmd)
        for ext in pathext:
            # then including the extension
            cmd_path_ext = cmd_path + ext
            if os.path.isfile(cmd_path_ext):
                return cmd_path_ext
        if os.path.isfile(cmd_path):
            return cmd_path
    raise BadCommand('Cannot find command %r' % cmd)


def get_pathext(default_pathext=None):
    """Returns the path extensions from environment or a default"""
    if default_pathext is None:
        default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
    pathext = os.environ.get('PATHEXT', default_pathext)
    return pathext


def ask_path_exists(message, options):
    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
        if action in options:
            return action
    return ask(message, options)

def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception('No input was expected ($PIP_NO_INPUT set); '
                            'question: %s' % message)
        response = raw_input(message)
        response = response.strip().lower()
        if response not in options:
            print('Your response (%r) was not one of the expected responses: '
                  '%s' % (response, ', '.join(options)))
        else:
            return response


class _Inf(object):
    """I am bigger than everything!"""

    def __eq__(self, other):
        if self is other:
            return True
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __repr__(self):
        return 'Inf'


Inf = _Inf()  # this object is not currently used as a sortable in our code
del _Inf

_normalize_re = re.compile(r'[^a-z]', re.I)


def normalize_name(name):
    return _normalize_re.sub('-', name.lower())


def format_size(bytes):
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    elif bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    elif bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    else:
        return '%ibytes' % bytes

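
# Illustrative sketch (not part of upstream pip): normalize_name() replaces
# each non-letter character with '-', and format_size() picks a unit by
# magnitude.
#
#     >>> normalize_name('Flask_SQLAlchemy')
#     'flask-sqlalchemy'
#     >>> format_size(2500000)
#     '2.5MB'
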
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    if not os.path.isdir(path):
        return False
    setup_py = os.path.join(path, 'setup.py')
    if os.path.isfile(setup_py):
        return True
    return False


def is_svn_page(html):
    """Returns true if the page appears to be the index page of an svn
    repository"""
    return (re.search(r'<title>[^<]*Revision \d+:', html)
            and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))


def file_contents(filename):
    fp = open(filename, 'rb')
    try:
        return fp.read().decode('utf-8')
    finally:
        fp.close()

def split_leading_dir(path):
    path = str(path)
    path = path.lstrip('/').lstrip('\\')
    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
                        or '\\' not in path):
        return path.split('/', 1)
    elif '\\' in path:
        return path.split('\\', 1)
    else:
        return path, ''


def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True

def make_path_relative(path, rel_to):
    """
    Make a filename relative, where the filename is the given ``path``, and
    it is made relative to the directory ``rel_to``.

    >>> make_path_relative('/usr/share/something/a-file.pth',
    ...                    '/usr/share/another-place/src/Directory')
    '../../../something/a-file.pth'
    >>> make_path_relative('/usr/share/something/a-file.pth',
    ...                    '/home/user/src/Directory')
    '../../../usr/share/something/a-file.pth'
    >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
    'a-file.pth'
    """
    path_filename = os.path.basename(path)
    path = os.path.dirname(path)
    path = os.path.normpath(os.path.abspath(path))
    rel_to = os.path.normpath(os.path.abspath(rel_to))
    path_parts = path.strip(os.path.sep).split(os.path.sep)
    rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
    while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
        path_parts.pop(0)
        rel_to_parts.pop(0)
    full_parts = ['..'] * len(rel_to_parts) + path_parts + [path_filename]
    if full_parts == ['']:
        return '.' + os.path.sep
    return os.path.sep.join(full_parts)

def normalize_path(path):
    """
    Convert a path to its canonical, case-normalized, absolute version.
    """
    return os.path.normcase(os.path.realpath(os.path.expanduser(path)))


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass

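
# Illustrative sketch (not part of upstream pip): splitext() treats the
# compound '.tar.*' suffix as one extension, unlike os.path.splitext.
#
#     >>> splitext('archive.tar.gz')
#     ('archive', '.tar.gz')
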
def is_local(path):
    """
    Return True if this is a path pip is allowed to modify.

    If we're in a virtualenv, sys.prefix points to the virtualenv's
    prefix; only sys.prefix is considered local.

    If we're not in a virtualenv, in general we can modify anything.
    However, if the OS vendor has configured distutils to install
    somewhere other than sys.prefix (which could be a subdirectory of
    sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
    and the domain of the OS vendor.  (In other words, everything _other
    than_ sys.prefix is considered local.)
    """

    path = normalize_path(path)
    prefix = normalize_path(sys.prefix)

    if running_under_virtualenv():
        return path.startswith(normalize_path(sys.prefix))
    else:
        if path.startswith(prefix):
            for local_path in distutils_scheme("").values():
                if path.startswith(normalize_path(local_path)):
                    return True
            return False
        else:
            return True


def dist_is_local(dist):
    """
    Return True if given Distribution object is installed somewhere pip
    is allowed to modify.
    """
    return is_local(dist_location(dist))


def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    if user_site:
        return normalize_path(dist_location(dist)).startswith(
            normalize_path(user_site))
    else:
        return False


def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    """
    return normalize_path(dist_location(dist)).startswith(
        normalize_path(site_packages))


def dist_is_editable(dist):
    """Is distribution an editable install?"""
    # TODO: factor out determining editableness out of FrozenRequirement
    from pip import FrozenRequirement
    req = FrozenRequirement.from_dist(dist, [])
    return req.editable

def get_installed_distributions(local_only=True,
                                skip=('setuptools', 'pip', 'python',
                                      'distribute'),
                                include_editables=True,
                                editables_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to ('setuptools', 'pip', 'python', 'distribute').
    [FIXME also skip virtualenv?]

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True, only report editables.
    """
    if local_only:
        local_test = dist_is_local
    else:
        local_test = lambda d: True

    if include_editables:
        editable_test = lambda d: True
    else:
        editable_test = lambda d: not dist_is_editable(d)

    if editables_only:
        editables_only_test = lambda d: dist_is_editable(d)
    else:
        editables_only_test = lambda d: True

    return [d for d in pkg_resources.working_set
            if local_test(d)
            and d.key not in skip
            and editable_test(d)
            and editables_only_test(d)
            ]

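
# Illustrative sketch (not part of upstream pip): narrowing the working set
# to editable installs only, roughly what 'pip freeze' does for '-e' lines.
# The project name shown is hypothetical.
#
#     >>> [d.project_name for d in get_installed_distributions(editables_only=True)]
#     ['my-editable-project']
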
def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.  This method will just return the first one found.
    """
    sites = []
    if running_under_virtualenv():
        if virtualenv_no_global():
            sites.append(site_packages)
        else:
            sites.append(site_packages)
            if user_site:
                sites.append(user_site)
    else:
        if user_site:
            sites.append(user_site)
        sites.append(site_packages)

    for site in sites:
        egglink = os.path.join(site, dist.project_name) + '.egg-link'
        if os.path.isfile(egglink):
            return egglink


def dist_location(dist):
    """
    Get the site-packages location of this distribution.  Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.
    """
    egg_link = egg_link_path(dist)
    if egg_link:
        return egg_link
    return dist.location

def get_terminal_size():
    """Returns a tuple (x, y) representing the width(x) and the height(y)
    in characters of the terminal window."""
    def ioctl_GWINSZ(fd):
        try:
            import fcntl
            import termios
            import struct
            cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
                                                 '1234'))
        except:
            return None
        if cr == (0, 0):
            return None
        return cr
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except:
            pass
    if not cr:
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    return int(cr[1]), int(cr[0])


def current_umask():
    """Get the current umask which involves having to set it temporarily."""
    mask = os.umask(0)
    os.umask(mask)
    return mask

def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`.  All
    files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp)
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                if not os.path.exists(fn):
                    os.makedirs(fn)
            else:
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x); no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()

def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e.
    permissions are not preserved), except that regular file members with
    any execute permissions (user, group, or world) have "chmod +x" applied
    after being written.  Note that for windows, any execute changes using
    os.chmod are no-ops per the python docs.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif (filename.lower().endswith('.bz2')
            or filename.lower().endswith('.tbz')):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warn('Cannot determine compression type for file %s' % filename)
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter
        # them out
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            elif member.issym():
                try:
                    tar._extract_member(member, path)
                except:
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, e))
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, e))
                    continue
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                destfp = open(path, 'wb')
                try:
                    shutil.copyfileobj(fp, destfp)
                finally:
                    destfp.close()
                fp.close()
                # does the member have any execute permissions for
                # user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world;
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()

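
# Illustrative sketch (not part of upstream pip): the permission expression
# used above grants the umask-filtered rwx bits plus execute. With the
# common umask 0o022:
#
#     >>> oct(0o777 - 0o022 | 0o111)
#     '0o755'
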
def create_download_cache_folder(folder):
    logger.indent -= 2
    logger.notify('Creating supposed download cache at %s' % folder)
    logger.indent += 2
    os.makedirs(folder)


def cache_download(target_file, temp_location, content_type):
    logger.notify('Storing download in cache at %s'
                  % display_path(target_file))
    shutil.copyfile(temp_location, target_file)
    fp = open(target_file + '.content-type', 'w')
    fp.write(content_type)
    fp.close()

def unpack_file(filename, location, content_type, link):
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip'
            or filename.endswith('.zip')
            or filename.endswith('.pybundle')
            or filename.endswith('.whl')
            or zipfile.is_zipfile(filename)):
        unzip_file(filename, location,
                   flatten=not filename.endswith(('.pybundle', '.whl')))
    elif (content_type == 'application/x-gzip'
            or tarfile.is_tarfile(filename)
            or splitext(filename)[1].lower() in ('.tar', '.tar.gz',
                                                 '.tar.bz2', '.tgz', '.tbz')):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html')
            and is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        ## FIXME: handle?
        ## FIXME: magic signatures?
        logger.fatal('Cannot unpack file %s (downloaded from %s, '
                     'content-type: %s); cannot detect archive format'
                     % (filename, location, content_type))
        raise InstallationError('Cannot determine archive format of %s'
                                % location)

def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=logger.DEBUG, command_desc=None,
                    extra_environ=None):
    if command_desc is None:
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception:
        e = sys.exc_info()[1]
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        stdout = proc.stdout
        while 1:
            line = console_to_str(stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                logger.notify('Complete output from command %s:'
                              % command_desc)
                logger.notify('\n'.join(all_output)
                              + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s in %s"
                % (command_desc, proc.returncode, cwd))
        else:
            logger.warn(
                "Command %s had error code %s in %s"
                % (command_desc, proc.returncode, cwd))
    if stdout is not None:
        return ''.join(all_output)

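
# Illustrative sketch (not part of upstream pip): capturing a command's
# output instead of streaming it, the way the VCS backends call this helper.
# Assumes 'git' is on PATH.
#
#     >>> out = call_subprocess(['git', '--version'], show_stdout=False)
#     >>> out.startswith('git version')
#     True
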
def is_prerelease(vers):
    """
    Attempt to determine if this is a pre-release using PEP386/PEP426 rules.

    Will return True if it is a pre-release and False if not.  Versions are
    assumed to be a pre-release if they cannot be parsed.
    """
    normalized = version._suggest_normalized_version(vers)

    if normalized is None:
        # Cannot normalize, assume it is a pre-release
        return True

    parsed = version._normalized_key(normalized)
    return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x])
                for x in parsed])

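
# Illustrative sketch (not part of upstream pip): expected behaviour under
# the rules above (values are assumptions, not captured output):
#
#     >>> is_prerelease('1.5b1'), is_prerelease('1.5')
#     (True, False)
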
class BuildDirectory(object):

    def __init__(self, name=None, delete=None):
        if name is None:
            name = tempfile.mkdtemp(prefix="pip-build-")
            # If we were not given an explicit directory, and we were not
            # given an explicit delete option, then we'll default to
            # deleting.
            if delete is None:
                delete = True

        self.name = name
        self.delete = delete

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        return self.name

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def cleanup(self):
        if self.delete:
            rmtree(self.name)
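
# Illustrative sketch (not part of upstream pip): BuildDirectory works as a
# context manager; an implicitly created temp dir is removed on exit, while
# an explicitly named one is kept unless delete=True is passed.
#
#     >>> with BuildDirectory() as build_dir:
#     ...     pass  # build_dir is deleted when the block exits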
251
lib/python3.4/site-packages/pip/vcs/__init__.py
Normal file
@@ -0,0 +1,251 @@
"""Handles all VCS (version control) support"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from pip.backwardcompat import urlparse, urllib
|
||||
from pip.log import logger
|
||||
from pip.util import (display_path, backup_dir, find_command,
|
||||
rmtree, ask_path_exists)
|
||||
|
||||
|
||||
__all__ = ['vcs', 'get_src_requirement']
|
||||
|
||||
|
||||
class VcsSupport(object):
    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Register more schemes with urlparse for various version control
        # systems
        urlparse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urlparse, 'uses_fragment', None):
            urlparse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return self._registry.__iter__()

    @property
    def backends(self):
        return list(self._registry.values())

    @property
    def dirnames(self):
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        if not hasattr(cls, 'name'):
            logger.warn('Cannot register VCS %s' % cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls

    def unregister(self, cls=None, name=None):
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warn('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            path = os.path.join(location, vc_type.dirname)
            if os.path.exists(path):
                return vc_type.name
        return None

    def get_backend(self, name):
        name = name.lower()
        if name in self._registry:
            return self._registry[name]

    def get_backend_from_location(self, location):
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None


vcs = VcsSupport()
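
# Illustrative sketch (not part of upstream pip): the module-level registry
# is queried by backend name or by on-disk marker once the concrete backends
# (git, mercurial, subversion, bazaar) have registered themselves on import.
#
#     >>> backend_cls = vcs.get_backend('git')
#     >>> vcs.get_backend_name('/path/to/checkout')  # e.g. 'git' if .git exists
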
class VersionControl(object):
    name = ''
    dirname = ''

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        self._cmd = None
        super(VersionControl, self).__init__(*args, **kwargs)

    def _filter(self, line):
        return (logger.INFO, line)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with a drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive

    @property
    def cmd(self):
        if self._cmd is not None:
            return self._cmd
        command = find_command(self.name)
        logger.info('Found command %r at %r' % (self.name, command))
        self._cmd = command
        return command

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

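    # Illustrative sketch (not part of upstream pip): get_url_rev() strips the
    # '<vcs>+' prefix and splits an optional '@rev' suffix, so a backend
    # constructed with 'git+https://example.com/repo.git@v1.0' would return
    # ('https://example.com/repo.git', 'v1.0').
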
    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def parse_vcs_bundle_file(self, content):
        """
        Takes the contents of the bundled text file that explains how to
        revert the stripped off version control data of the given package
        and returns the URL and revision of it.
        """
        raise NotImplementedError

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``url``.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.info('%s in %s exists, and has correct URL (%s)' %
                                (self.repo_name.title(), display_path(dest),
                                 url))
                    logger.notify('Updating %s %s%s' %
                                  (display_path(dest), self.repo_name,
                                   rev_display))
                    self.update(dest, rev_options)
                else:
                    logger.warn('%s %s in %s exists with URL %s' %
                                (self.name, self.repo_name,
                                 display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warn('Directory %s already exists, '
                            'and is not a %s %s.' %
                            (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s' %
                        (self.name, url))
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.notify('Switching %s %s to %s%s' %
                              (self.repo_name, display_path(dest), url,
                               rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout

    def unpack(self, location):
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location, find_tags=False):
        raise NotImplementedError


def get_src_requirement(dist, location, find_tags):
    version_control = vcs.get_backend_from_location(location)
    if version_control:
        return version_control().get_src_requirement(dist, location,
                                                     find_tags)
    logger.warn('cannot determine version of editable source in %s '
                '(is not SVN checkout, Git clone, Mercurial clone or '
                'Bazaar branch)' % location)
    return dist.as_requirement()
131
lib/python3.4/site-packages/pip/vcs/bazaar.py
Normal file
@@ -0,0 +1,131 @@
import os
import tempfile
import re

from pip.backwardcompat import urlparse
from pip.log import logger
from pip.util import rmtree, display_path, call_subprocess
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url

class Bazaar(VersionControl):
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    bundle_file = 'bzr-branch.txt'
    schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp',
               'bzr+ftp', 'bzr+lp')
    guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
             'bzr branch -r %(rev)s %(url)s .\n')

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urlparse, 'uses_fragment', None):
            urlparse.uses_fragment.extend(['lp'])
            urlparse.non_hierarchical.extend(['lp'])

    def parse_vcs_bundle_file(self, content):
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
            if match:
                rev = match.group(1).strip()
                url = line[match.end():].strip().split(None, 1)[0]
            if url and rev:
                return url, rev
        return None, None

    def export(self, location):
        """Export the Bazaar repository at the url to the destination
        location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            rmtree(location)
        try:
            call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
                            filter_stdout=self._filter, show_stdout=False)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        call_subprocess([self.cmd, 'switch', url], cwd=dest)

    def update(self, dest, rev_options):
        call_subprocess(
            [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix: the superclass strips 'bzr+' from 'bzr+ssh://'; re-add it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        urls = call_subprocess(
            [self.cmd, 'info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        revision = call_subprocess(
            [self.cmd, 'revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_tag_revs(self, location):
        tags = call_subprocess(
            [self.cmd, 'tags'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
            if tags_match:
                tag = tags_match.group(1)
                rev = tags_match.group(2)
                tag_revs.append((rev.strip(), tag.strip()))
        return dict(tag_revs)

    def get_src_requirement(self, dist, location, find_tags):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        tag_revs = self.get_tag_revs(location)

        if current_rev in tag_revs:
            # It's a tag
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        else:
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)


vcs.register(Bazaar)
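
# Illustrative sketch (not part of upstream pip): for a non-tag checkout at
# revision 42, get_src_requirement() would produce something like
# 'bzr+http://example.com/branch@42#egg=example-0.1-dev_r42'
# (all values hypothetical).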
194
lib/python3.4/site-packages/pip/vcs/git.py
Normal file
@@ -0,0 +1,194 @@
import tempfile
import re
import os.path

from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.log import logger
from pip.backwardcompat import url2pathname, urlparse

urlsplit = urlparse.urlsplit
urlunsplit = urlparse.urlunsplit

class Git(VersionControl):
    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git',
               'git+file')
    bundle_file = 'git-clone.txt'
    guide = ('# This was a Git repo; to make it a repo again run:\n'
             'git init\ngit remote add origin %(url)s -f\n'
             'git checkout %(rev)s\n')

    def __init__(self, url=None, *args, **kwargs):

        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (initial_slashes
                           + url2pathname(path).replace('\\', '/').lstrip('/'))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment))

        super(Git, self).__init__(url, *args, **kwargs)

def parse_vcs_bundle_file(self, content):
|
||||
url = rev = None
|
||||
for line in content.splitlines():
|
||||
if not line.strip() or line.strip().startswith('#'):
|
||||
continue
|
||||
url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
|
||||
if url_match:
|
||||
url = url_match.group(1).strip()
|
||||
rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
|
||||
if rev_match:
|
||||
rev = rev_match.group(1).strip()
|
||||
if url and rev:
|
||||
return url, rev
|
||||
return None, None
|
||||
|
||||
def export(self, location):
|
||||
"""Export the Git repository at the url to the destination location"""
|
||||
temp_dir = tempfile.mkdtemp('-export', 'pip-')
|
||||
self.unpack(temp_dir)
|
||||
try:
|
||||
if not location.endswith('/'):
|
||||
location = location + '/'
|
||||
call_subprocess(
|
||||
[self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
|
||||
filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
|
||||
finally:
|
||||
rmtree(temp_dir)
|
||||
|
||||
def check_rev_options(self, rev, dest, rev_options):
|
||||
"""Check the revision options before checkout to compensate that tags
|
||||
and branches may need origin/ as a prefix.
|
||||
Returns the SHA1 of the branch or tag if found.
|
||||
"""
|
||||
revisions = self.get_refs(dest)
|
||||
|
||||
origin_rev = 'origin/%s' % rev
|
||||
if origin_rev in revisions:
|
||||
# remote branch
|
||||
return [revisions[origin_rev]]
|
||||
elif rev in revisions:
|
||||
# a local tag or branch name
|
||||
return [revisions[rev]]
|
||||
else:
|
||||
logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
|
||||
return rev_options
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
call_subprocess(
|
||||
[self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
|
||||
call_subprocess(
|
||||
[self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
|
||||
|
||||
self.update_submodules(dest)
|
||||
|
||||
def update(self, dest, rev_options):
|
||||
# First fetch changes from the default remote
|
||||
call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
|
||||
# Then reset to wanted revision (maby even origin/master)
|
||||
if rev_options:
|
||||
rev_options = self.check_rev_options(rev_options[0], dest, rev_options)
|
||||
call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest)
|
||||
#: update submodules
|
||||
self.update_submodules(dest)
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
if rev:
|
||||
rev_options = [rev]
|
||||
rev_display = ' (to %s)' % rev
|
||||
else:
|
||||
rev_options = ['origin/master']
|
||||
rev_display = ''
|
||||
if self.check_destination(dest, url, rev_options, rev_display):
|
||||
logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
|
||||
call_subprocess([self.cmd, 'clone', '-q', url, dest])
|
||||
#: repo may contain submodules
|
||||
self.update_submodules(dest)
|
||||
if rev:
|
||||
rev_options = self.check_rev_options(rev, dest, rev_options)
|
||||
# Only do a checkout if rev_options differs from HEAD
|
||||
if not self.get_revision(dest).startswith(rev_options[0]):
|
||||
call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
|
||||
|
||||
def get_url(self, location):
|
||||
url = call_subprocess(
|
||||
[self.cmd, 'config', 'remote.origin.url'],
|
||||
show_stdout=False, cwd=location)
|
||||
return url.strip()
|
||||
|
||||
def get_revision(self, location):
|
||||
current_rev = call_subprocess(
|
||||
[self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
|
||||
return current_rev.strip()
|
||||
|
||||
def get_refs(self, location):
|
||||
"""Return map of named refs (branches or tags) to commit hashes."""
|
||||
output = call_subprocess([self.cmd, 'show-ref'],
|
||||
show_stdout=False, cwd=location)
|
||||
rv = {}
|
||||
for line in output.strip().splitlines():
|
||||
commit, ref = line.split(' ', 1)
|
||||
ref = ref.strip()
|
||||
ref_name = None
|
||||
if ref.startswith('refs/remotes/'):
|
||||
ref_name = ref[len('refs/remotes/'):]
|
||||
elif ref.startswith('refs/heads/'):
|
||||
ref_name = ref[len('refs/heads/'):]
|
||||
elif ref.startswith('refs/tags/'):
|
||||
ref_name = ref[len('refs/tags/'):]
|
||||
if ref_name is not None:
|
||||
rv[ref_name] = commit.strip()
|
||||
return rv
|
||||
|
||||
def get_src_requirement(self, dist, location, find_tags):
|
||||
repo = self.get_url(location)
|
||||
if not repo.lower().startswith('git:'):
|
||||
repo = 'git+' + repo
|
||||
egg_project_name = dist.egg_name().split('-', 1)[0]
|
||||
if not repo:
|
||||
return None
|
||||
current_rev = self.get_revision(location)
|
||||
refs = self.get_refs(location)
|
||||
# refs maps names to commit hashes; we need the inverse
|
||||
# if multiple names map to a single commit, this arbitrarily picks one
|
||||
names_by_commit = dict((commit, ref) for ref, commit in refs.items())
|
||||
|
||||
if current_rev in names_by_commit:
|
||||
# It's a tag
|
||||
full_egg_name = '%s-%s' % (egg_project_name, names_by_commit[current_rev])
|
||||
else:
|
||||
full_egg_name = '%s-dev' % egg_project_name
|
||||
|
||||
return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
|
||||
|
||||
def get_url_rev(self):
|
||||
"""
|
||||
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
|
||||
That's required because although they use SSH they sometimes doesn't
|
||||
work with a ssh:// scheme (e.g. Github). But we need a scheme for
|
||||
parsing. Hence we remove it again afterwards and return it as a stub.
|
||||
"""
|
||||
if not '://' in self.url:
|
||||
assert not 'file:' in self.url
|
||||
self.url = self.url.replace('git+', 'git+ssh://')
|
||||
url, rev = super(Git, self).get_url_rev()
|
||||
url = url.replace('ssh://', '')
|
||||
else:
|
||||
url, rev = super(Git, self).get_url_rev()
|
||||
|
||||
return url, rev
|
||||
|
||||
def update_submodules(self, location):
|
||||
if not os.path.exists(os.path.join(location, '.gitmodules')):
|
||||
return
|
||||
call_subprocess([self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'],
|
||||
cwd=location)
|
||||
|
||||
vcs.register(Git)
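
# Illustrative sketch (not part of pip): how get_url_rev() normalizes a
# scp-style stub URL. 'git+' is temporarily rewritten to 'git+ssh://' so the
# URL can be parsed, then 'ssh://' is stripped again, leaving the stub form.
# The repository URL below is hypothetical; nothing here touches the network.
def _example_git_url_rev():
    git = Git('git+git@github.com:user/repo.git@v1.0')
    url, rev = git.get_url_rev()
    # url == 'git@github.com:user/repo.git', rev == 'v1.0'
    return url, rev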

151
lib/python3.4/site-packages/pip/vcs/mercurial.py
Normal file
@ -0,0 +1,151 @@
import os
import tempfile
import re
import sys
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.log import logger
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
from pip.backwardcompat import ConfigParser


class Mercurial(VersionControl):
    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
    bundle_file = 'hg-clone.txt'
    guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
             'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')

    def parse_vcs_bundle_file(self, content):
        url = rev = None
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
            if url_match:
                url = url_match.group(1).strip()
            rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
            if rev_match:
                rev = rev_match.group(1).strip()
            if url and rev:
                return url, rev
        return None, None

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            call_subprocess(
                [self.cmd, 'archive', location],
                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = ConfigParser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            config_file = open(repo_config, 'w')
            config.write(config_file)
            config_file.close()
        except (OSError, ConfigParser.NoSectionError):
            e = sys.exc_info()[1]
            logger.warn(
                'Could not switch Mercurial repository to %s: %s'
                % (url, e))
        else:
            call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)

    def update(self, dest, rev_options):
        call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
        call_subprocess(
            [self.cmd, 'update', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Cloning hg %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
            call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        url = call_subprocess(
            [self.cmd, 'showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_tag_revs(self, location):
        tags = call_subprocess(
            [self.cmd, 'tags'], show_stdout=False, cwd=location)
        tag_revs = []
        for line in tags.splitlines():
            tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
            if tags_match:
                tag = tags_match.group(1)
                rev = tags_match.group(2)
                if "tip" != tag:
                    tag_revs.append((rev.strip(), tag.strip()))
        return dict(tag_revs)

    def get_branch_revs(self, location):
        branches = call_subprocess(
            [self.cmd, 'branches'], show_stdout=False, cwd=location)
        branch_revs = []
        for line in branches.splitlines():
            branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
            if branches_match:
                branch = branches_match.group(1)
                rev = branches_match.group(2)
                if "default" != branch:
                    branch_revs.append((rev.strip(), branch.strip()))
        return dict(branch_revs)

    def get_revision(self, location):
        current_revision = call_subprocess(
            [self.cmd, 'parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        current_rev_hash = call_subprocess(
            [self.cmd, 'parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location, find_tags):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        current_rev_hash = self.get_revision_hash(location)
        tag_revs = self.get_tag_revs(location)
        branch_revs = self.get_branch_revs(location)
        if current_rev in tag_revs:
            # It's a tag
            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
        elif current_rev in branch_revs:
            # It's the tip of a branch
            full_egg_name = '%s-%s' % (egg_project_name, branch_revs[current_rev])
        else:
            full_egg_name = '%s-dev' % egg_project_name
        return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)


vcs.register(Mercurial)
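
# Illustrative sketch (not part of pip): how get_tag_revs parses one line of
# `hg tags` output, e.g. 'v1.0    3:a1b2c3d4e5f6', into a {local_rev: tag}
# entry. The sample line is hypothetical; no hg process is run here.
def _example_hg_tag_parse():
    line = 'v1.0                               3:a1b2c3d4e5f6'
    match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
    # match.group(1) == 'v1.0' (tag), match.group(2) == '3' (local revision)
    return {match.group(2): match.group(1)}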

273
lib/python3.4/site-packages/pip/vcs/subversion.py
Normal file
@ -0,0 +1,273 @@
import os
import re
from pip.backwardcompat import urlparse
from pip.index import Link
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.vcs import vcs, VersionControl

_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')


class Subversion(VersionControl):
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
    bundle_file = 'svn-checkout.txt'
    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
             'svn checkout --force -r %(rev)s %(url)s .\n')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        output = call_subprocess(
            [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
        match = _svn_url_re.search(output)
        if not match:
            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
            logger.info('Output that cannot be parsed: \n%s' % output)
            return url, None
        return url, match.group(1)

    def parse_vcs_bundle_file(self, content):
        for line in content.splitlines():
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^-r\s*([^ ])?', line)
            if not match:
                return None, None
            rev = match.group(1)
            rest = line[match.end():].strip().split(None, 1)[0]
            return rest, rev
        return None, None

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        logger.notify('Exporting svn repository %s to %s' % (url, location))
        logger.indent += 2
        try:
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing directory
                # --force fixes this, but was only added in svn 1.5
                rmtree(location)
            call_subprocess(
                [self.cmd, 'export'] + rev_options + [url, location],
                filter_stdout=self._filter, show_stdout=False)
        finally:
            logger.indent -= 2

    def switch(self, dest, url, rev_options):
        call_subprocess(
            [self.cmd, 'switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        call_subprocess(
            [self.cmd, 'update'] + rev_options + [dest])

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                ## FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                base_url = dirurl + '/'  # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme: the svn+ prefix is stripped from svn+ssh://
        # during parsing, so re-add it here
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, i.e. not alongside
        # setup.py, we have to walk up the tree until we find a real setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without finding setup.py
                logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                            % orig_location)
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        from pip.exceptions import InstallationError

        f = open(os.path.join(location, self.dirname, 'entries'))
        data = f.read()
        f.close()
        if data.startswith('8') or data.startswith('9') or data.startswith('10'):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = call_subprocess([self.cmd, 'info', '--xml', location], show_stdout=False)
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_tag_revs(self, svn_tag_url):
        stdout = call_subprocess(
            [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
        results = []
        for line in stdout.splitlines():
            parts = line.split()
            rev = int(parts[0])
            tag = parts[-1].strip('/')
            results.append((tag, rev))
        return results

    def find_tag_match(self, rev, tag_revs):
        best_match_rev = None
        best_tag = None
        for tag, tag_rev in tag_revs:
            if (tag_rev > rev and
                    (best_match_rev is None or best_match_rev > tag_rev)):
                # FIXME: Is best_match > tag_rev really possible?
                # or is it a sign something is wacky?
                best_match_rev = tag_rev
                best_tag = tag
        return best_tag

    def get_src_requirement(self, dist, location, find_tags=False):
        repo = self.get_url(location)
        if repo is None:
            return None
        parts = repo.split('/')
        ## FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        if parts[-2] in ('tags', 'tag'):
            # It's a tag, perfect!
            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
        elif parts[-2] in ('branches', 'branch'):
            # It's a branch :(
            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
        elif parts[-1] == 'trunk':
            # Trunk :-/
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
            if find_tags:
                tag_url = '/'.join(parts[:-1]) + '/tags'
                tag_revs = self.get_tag_revs(tag_url)
                match = self.find_tag_match(rev, tag_revs)
                if match:
                    logger.notify('trunk checkout %s seems to be equivalent to tag %s' % (rev, match))
                    repo = '%s/%s' % (tag_url, match)
                    full_egg_name = '%s-%s' % (egg_project_name, match)
        else:
            # Don't know what it is
            logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)


def get_rev_options(url, rev):
    if rev:
        rev_options = ['-r', rev]
    else:
        rev_options = []

    r = urlparse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options


vcs.register(Subversion)
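
# Illustrative sketch (not part of pip): credentials embedded in an svn URL
# become --username/--password flags via get_rev_options. The URL and
# credentials below are hypothetical; no svn process is run here.
def _example_svn_rev_options():
    opts = get_rev_options('svn+http://alice:secret@host/repo', '42')
    # opts == ['-r', '42', '--username', 'alice', '--password', 'secret']
    return opts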

559
lib/python3.4/site-packages/pip/wheel.py
Normal file
@ -0,0 +1,559 @@
"""
|
||||
Support for installing and building the "wheel" binary package format.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import compileall
|
||||
import csv
|
||||
import functools
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from base64 import urlsafe_b64encode
|
||||
from email.parser import Parser
|
||||
|
||||
from pip.backwardcompat import ConfigParser, StringIO
|
||||
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
|
||||
from pip.locations import distutils_scheme
|
||||
from pip.log import logger
|
||||
from pip import pep425tags
|
||||
from pip.util import call_subprocess, normalize_path, make_path_relative
|
||||
import pkg_resources
|
||||
from distlib.scripts import ScriptMaker
|
||||
|
||||
|
||||
wheel_ext = '.whl'
|
||||
|
||||
VERSION_COMPATIBLE = (1, 0)
|
||||
|
||||
|
||||
def rehash(path, algo='sha256', blocksize=1<<20):
|
||||
"""Return (hash, length) for path using hashlib.new(algo)"""
|
||||
h = hashlib.new(algo)
|
||||
length = 0
|
||||
with open(path, 'rb') as f:
|
||||
block = f.read(blocksize)
|
||||
while block:
|
||||
length += len(block)
|
||||
h.update(block)
|
||||
block = f.read(blocksize)
|
||||
digest = 'sha256='+urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')
|
||||
return (digest, length)
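
# Illustrative sketch (not part of pip): rehash produces the RECORD-style
# entry ('sha256=<urlsafe-b64-digest>', <file length>). The path below is
# hypothetical.
def _example_rehash(tmp_path='/tmp/example.txt'):
    with open(tmp_path, 'wb') as f:
        f.write(b'hello wheel\n')
    digest, length = rehash(tmp_path)
    # length == 12; digest starts with 'sha256=' and has no '=' padding
    return digest, length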

try:
    unicode

    def binary(s):
        if isinstance(s, unicode):
            return s.encode('ascii')
        return s
except NameError:
    def binary(s):
        if isinstance(s, str):
            return s.encode('ascii')
        return s


def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)


def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        script = open(path, 'rb')
        try:
            firstline = script.readline()
            if not firstline.startswith(binary('#!python')):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = binary('#!') + exename + binary(os.linesep)
            rest = script.read()
        finally:
            script.close()
        script = open(path, 'wb')
        try:
            script.write(firstline)
            script.write(rest)
        finally:
            script.close()
        return True
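
# Illustrative sketch (not part of pip): a wheel ships scripts with a
# '#!python' stub shebang, and fix_script rewrites it to point at the running
# interpreter. The path below is hypothetical.
def _example_fix_script(path='/tmp/example-script'):
    with open(path, 'wb') as f:
        f.write(b'#!python\nprint("hi")\n')
    changed = fix_script(path)
    # changed is True; the first line is now '#!' + sys.executable
    return changed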

dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                           \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False


def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = ConfigParser.RawConfigParser()
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui
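
# Illustrative sketch (not part of pip): parse a minimal entry_points.txt
# into the (console, gui) dicts that move_wheel_files consumes. The path and
# file contents below are hypothetical.
def _example_get_entrypoints(path='/tmp/entry_points.txt'):
    with open(path, 'w') as f:
        f.write('[console_scripts]\npip = pip:main\n')
    console, gui = get_entrypoints(path)
    # console == {'pip': 'pip:main'}, gui == {}
    return console, gui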

def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(name, user=user, home=home, root=root)

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        compileall.compile_dir(source, force=True, quiet=True)

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        if not os.path.exists(dest):  # common for the 'include' path
            os.makedirs(dest)

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base
                        and s.endswith('.dist-info')
                        # is self.req.project_name case preserving?
                        and s.lower().startswith(req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                if not os.path.exists(destdir):
                    os.makedirs(destdir)
                # use copy2 (not move) to be extra sure we're not moving
                # directories over; copy2 fails for directories. this would
                # fail tests (not during released/user execution)
                shutil.copy2(srcfile, destfile)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add to the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which versioned scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option applies whenever ENSUREPIP_OPTIONS is set to
    #     anything other than altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [k for k in console
                           if re.match(r'easy_install(-\d\.\d)?$', k)]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))
    if len(gui) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True}))

    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.req import FakeFile  # circular import
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
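
# Illustrative sketch (not part of pip): given a RECORD row for 'pkg/mod.py',
# uninstallation_paths yields both the .py file and its legacy .pyc sibling.
# The distribution object and location below are hypothetical stand-ins.
def _example_uninstallation_paths():
    class FakeDist(object):
        location = '/srv/site-packages'  # hypothetical

        def get_metadata_lines(self, name):
            return ['pkg/mod.py,,', 'pkg/data.txt,,']

    paths = list(uninstallation_paths(FakeDist()))
    # paths == ['/srv/site-packages/pkg/mod.py',
    #           '/srv/site-packages/pkg/mod.pyc',
    #           '/srv/site-packages/pkg/data.txt']
    return paths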

def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except:
        return False
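
# Illustrative sketch (not part of pip): WHEEL metadata is RFC 822-style, so
# the stdlib email parser extracts Wheel-Version directly. The metadata text
# below is a hypothetical sample.
def _example_wheel_version_parse():
    wheel_data = Parser().parsestr('Wheel-Version: 1.0\nRoot-Is-Purelib: true\n')
    version = tuple(map(int, wheel_data['Wheel-Version'].strip().split('.')))
    # version == (1, 0)
    return version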

def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warn('Installing from a newer Wheel-Version (%s)'
                    % '.'.join(map(str, version)))
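
# Illustrative sketch (not part of pip): with VERSION_COMPATIBLE == (1, 0),
# a minor-version bump only warns while a major bump raises UnsupportedWheel.
# 'samplepkg' is a hypothetical name.
def _example_check_compatibility():
    check_compatibility((1, 0), 'samplepkg')  # silently accepted
    check_compatibility((1, 2), 'samplepkg')  # warns: newer Wheel-Version
    try:
        check_compatibility((2, 0), 'samplepkg')
    except UnsupportedWheel:
        pass  # a major version ahead of (1, 0) is refused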

class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE)

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename("%s is not a valid wheel filename." % filename)
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set((x, y, z) for x in self.pyversions for y
                             in self.abis for z in self.plats)

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0. Returns
        None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return bool(set(tags).intersection(self.file_tags))
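
# Illustrative sketch (not part of pip): how a wheel filename decomposes into
# PEP 425 tags. The filename below is a hypothetical sample.
def _example_wheel_filename():
    w = Wheel('pip-1.5.6-py2.py3-none-any.whl')
    # w.name == 'pip', w.version == '1.5.6'
    # w.file_tags == {('py2', 'none', 'any'), ('py3', 'none', 'any')}
    return w.supported(tags=[('py3', 'none', 'any')])  # True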

class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, wheel_dir, build_options=[], global_options=[]):
        self.requirement_set = requirement_set
        self.finder = finder
        self.wheel_dir = normalize_path(wheel_dir)
        self.build_options = build_options
        self.global_options = global_options

    def _build_one(self, req):
        """Build one wheel."""

        base_args = [
            sys.executable, '-c',
            "import setuptools;__file__=%r;"
            "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % req.setup_py] + \
            list(self.global_options)

        logger.notify('Running setup.py bdist_wheel for %s' % req.name)
        logger.notify('Destination directory: %s' % self.wheel_dir)
        wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] + self.build_options
        try:
            call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
            return True
        except:
            logger.error('Failed building wheel for %s' % req.name)
            return False

    def build(self):
        """Build wheels."""

        # unpack and construct the requirement set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = [req for req in reqset if not req.is_wheel]

        if not buildset:
            return

        # build the wheels
        logger.notify(
            'Building wheels for collected packages: %s' %
            ','.join([req.name for req in buildset])
        )
        logger.indent += 2
        build_success, build_failure = [], []
        for req in buildset:
            if self._build_one(req):
                build_success.append(req)
            else:
                build_failure.append(req)
        logger.indent -= 2

        # notify success/failure
        if build_success:
            logger.notify('Successfully built %s' % ' '.join([req.name for req in build_success]))
        if build_failure:
            logger.notify('Failed to build %s' % ' '.join([req.name for req in build_failure]))