2013-10-11 17:28:32 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
"""
|
|
|
|
requests.adapters
|
|
|
|
~~~~~~~~~~~~~~~~~
|
|
|
|
|
|
|
|
This module contains the transport adapters that Requests uses to define
|
|
|
|
and maintain connections.
|
|
|
|
"""
|
|
|
|
|
2016-02-23 06:06:55 +00:00
|
|
|
import os.path
|
2013-10-11 17:28:32 +00:00
|
|
|
import socket
|
|
|
|
|
2019-01-13 08:01:53 +00:00
|
|
|
from urllib3.poolmanager import PoolManager, proxy_from_url
|
|
|
|
from urllib3.response import HTTPResponse
|
|
|
|
from urllib3.util import parse_url
|
|
|
|
from urllib3.util import Timeout as TimeoutSauce
|
|
|
|
from urllib3.util.retry import Retry
|
|
|
|
from urllib3.exceptions import ClosedPoolError
|
|
|
|
from urllib3.exceptions import ConnectTimeoutError
|
|
|
|
from urllib3.exceptions import HTTPError as _HTTPError
|
|
|
|
from urllib3.exceptions import MaxRetryError
|
|
|
|
from urllib3.exceptions import NewConnectionError
|
|
|
|
from urllib3.exceptions import ProxyError as _ProxyError
|
|
|
|
from urllib3.exceptions import ProtocolError
|
|
|
|
from urllib3.exceptions import ReadTimeoutError
|
|
|
|
from urllib3.exceptions import SSLError as _SSLError
|
|
|
|
from urllib3.exceptions import ResponseError
|
|
|
|
from urllib3.exceptions import LocationValueError
|
|
|
|
|
2013-10-11 17:28:32 +00:00
|
|
|
from .models import Response
|
2016-02-23 06:06:55 +00:00
|
|
|
from .compat import urlparse, basestring
|
2019-01-13 08:01:53 +00:00
|
|
|
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
|
|
|
|
get_encoding_from_headers, prepend_scheme_if_needed,
|
|
|
|
get_auth_from_url, urldefragauth, select_proxy)
|
2013-10-11 17:28:32 +00:00
|
|
|
from .structures import CaseInsensitiveDict
|
|
|
|
from .cookies import extract_cookies_to_jar
|
2016-02-23 06:06:55 +00:00
|
|
|
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
|
2019-01-13 08:01:53 +00:00
|
|
|
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
|
|
|
|
InvalidURL)
|
2013-10-11 17:28:32 +00:00
|
|
|
from .auth import _basic_auth_str
|
|
|
|
|
2019-01-13 08:01:53 +00:00
|
|
|
# SOCKS proxy support is optional (requires the urllib3[socks] extra).
# When it is missing, install a stub that fails loudly only if a SOCKS
# proxy is actually requested, instead of failing at import time.
try:
    from urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")
|
|
|
|
|
2013-10-11 17:28:32 +00:00
|
|
|
# Default connection-pool tuning used by HTTPAdapter.
DEFAULT_POOLBLOCK = False    # don't block when a pool has no free connection
DEFAULT_POOLSIZE = 10        # pools to cache / max connections kept per pool
DEFAULT_RETRIES = 0          # no retries by default
DEFAULT_POOL_TIMEOUT = None  # wait indefinitely when checking out a connection
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface every transport adapter must implement:
    :meth:`send` to dispatch a prepared request and :meth:`close` to
    release any held resources. Subclasses override both.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
|
|
|
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes round-tripped through pickle; see __getstate__/__setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']
|
|
|
|
|
|
|
|
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
|
|
|
|
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
|
|
|
|
pool_block=DEFAULT_POOLBLOCK):
|
2016-02-23 06:06:55 +00:00
|
|
|
if max_retries == DEFAULT_RETRIES:
|
|
|
|
self.max_retries = Retry(0, read=False)
|
|
|
|
else:
|
|
|
|
self.max_retries = Retry.from_int(max_retries)
|
2013-10-11 17:28:32 +00:00
|
|
|
self.config = {}
|
|
|
|
self.proxy_manager = {}
|
|
|
|
|
|
|
|
super(HTTPAdapter, self).__init__()
|
|
|
|
|
|
|
|
self._pool_connections = pool_connections
|
|
|
|
self._pool_maxsize = pool_maxsize
|
|
|
|
self._pool_block = pool_block
|
|
|
|
|
|
|
|
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
|
|
|
|
|
|
|
def __getstate__(self):
|
2019-01-13 08:01:53 +00:00
|
|
|
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
def __setstate__(self, state):
|
|
|
|
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
2016-02-23 06:06:55 +00:00
|
|
|
# self.poolmanager uses a lambda function, which isn't pickleable.
|
2013-10-11 17:28:32 +00:00
|
|
|
self.proxy_manager = {}
|
|
|
|
self.config = {}
|
|
|
|
|
|
|
|
for attr, value in state.items():
|
|
|
|
setattr(self, attr, value)
|
|
|
|
|
|
|
|
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
|
|
|
|
block=self._pool_block)
|
|
|
|
|
2016-02-23 06:06:55 +00:00
|
|
|
    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        # NOTE(review): strict=True appears to enable httplib's strict
        # status-line parsing on Python 2 -- confirm against the pinned
        # urllib3 version before changing.
        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)
|
|
|
|
|
|
|
|
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            # Reuse the manager already built for this exact proxy URL.
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            # SOCKS proxies carry credentials in the URL itself.
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            # HTTP(S) proxy: credentials travel as Proxy-Authorization headers.
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        :raises IOError: if the CA bundle, client certificate or key path
            does not exist on disk.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            # Fall back to the bundled CA certificates (extracted from a
            # zipimport archive if necessary).
            if not cert_loc:
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            # A directory is treated as a CA directory, a file as a bundle.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            # Plain HTTP or verification disabled: clear any earlier TLS
            # settings that may linger on a reused connection object.
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # cert may be a (cert_file, key_file) pair or a single path
            # to a combined certificate+key file.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {}".format(conn.key_file))
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
def build_response(self, req, resp):
|
|
|
|
"""Builds a :class:`Response <requests.Response>` object from a urllib3
|
|
|
|
response. This should not be called from user code, and is only exposed
|
|
|
|
for use when subclassing the
|
|
|
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
|
|
|
|
|
|
|
|
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
|
|
|
|
:param resp: The urllib3 response object.
|
2019-01-13 08:01:53 +00:00
|
|
|
:rtype: requests.Response
|
2013-10-11 17:28:32 +00:00
|
|
|
"""
|
|
|
|
response = Response()
|
|
|
|
|
|
|
|
# Fallback to None if there's no status_code, for whatever reason.
|
|
|
|
response.status_code = getattr(resp, 'status', None)
|
|
|
|
|
|
|
|
# Make headers case-insensitive.
|
|
|
|
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
|
|
|
|
|
|
|
|
# Set encoding.
|
|
|
|
response.encoding = get_encoding_from_headers(response.headers)
|
|
|
|
response.raw = resp
|
|
|
|
response.reason = response.raw.reason
|
|
|
|
|
|
|
|
if isinstance(req.url, bytes):
|
|
|
|
response.url = req.url.decode('utf-8')
|
|
|
|
else:
|
|
|
|
response.url = req.url
|
|
|
|
|
|
|
|
# Add new cookies from the server.
|
|
|
|
extract_cookies_to_jar(response.cookies, req, resp)
|
|
|
|
|
|
|
|
# Give the Response some context.
|
|
|
|
response.request = req
|
|
|
|
response.connection = self
|
|
|
|
|
|
|
|
return response
|
|
|
|
|
|
|
|
def get_connection(self, url, proxies=None):
|
|
|
|
"""Returns a urllib3 connection for the given URL. This should not be
|
|
|
|
called from user code, and is only exposed for use when subclassing the
|
|
|
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
|
|
|
|
|
|
|
:param url: The URL to connect to.
|
|
|
|
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
|
2019-01-13 08:01:53 +00:00
|
|
|
:rtype: urllib3.ConnectionPool
|
2013-10-11 17:28:32 +00:00
|
|
|
"""
|
2016-02-23 06:06:55 +00:00
|
|
|
proxy = select_proxy(url, proxies)
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
if proxy:
|
2014-09-05 16:01:36 +00:00
|
|
|
proxy = prepend_scheme_if_needed(proxy, 'http')
|
2019-01-13 08:01:53 +00:00
|
|
|
proxy_url = parse_url(proxy)
|
|
|
|
if not proxy_url.host:
|
|
|
|
raise InvalidProxyURL("Please check proxy URL. It is malformed"
|
|
|
|
" and could be missing the host.")
|
2016-02-23 06:06:55 +00:00
|
|
|
proxy_manager = self.proxy_manager_for(proxy)
|
|
|
|
conn = proxy_manager.connection_from_url(url)
|
2013-10-11 17:28:32 +00:00
|
|
|
else:
|
|
|
|
# Only scheme should be lower case
|
|
|
|
parsed = urlparse(url)
|
|
|
|
url = parsed.geturl()
|
|
|
|
conn = self.poolmanager.connection_from_url(url)
|
|
|
|
|
|
|
|
return conn
|
|
|
|
|
|
|
|
def close(self):
|
|
|
|
"""Disposes of any internal state.
|
|
|
|
|
2019-01-13 08:01:53 +00:00
|
|
|
Currently, this closes the PoolManager and any active ProxyManager,
|
|
|
|
which closes any pooled connections.
|
2013-10-11 17:28:32 +00:00
|
|
|
"""
|
|
|
|
self.poolmanager.clear()
|
2019-01-13 08:01:53 +00:00
|
|
|
for proxy in self.proxy_manager.values():
|
|
|
|
proxy.clear()
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
def request_url(self, request, proxies):
|
|
|
|
"""Obtain the url to use when making the final request.
|
|
|
|
|
|
|
|
If the message is being sent through a HTTP proxy, the full URL has to
|
|
|
|
be used. Otherwise, we should only use the path portion of the URL.
|
|
|
|
|
|
|
|
This should not be called from user code, and is only exposed for use
|
|
|
|
when subclassing the
|
|
|
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
|
|
|
|
|
|
|
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
2016-02-23 06:06:55 +00:00
|
|
|
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
|
2019-01-13 08:01:53 +00:00
|
|
|
:rtype: str
|
2013-10-11 17:28:32 +00:00
|
|
|
"""
|
2016-02-23 06:06:55 +00:00
|
|
|
proxy = select_proxy(request.url, proxies)
|
2013-10-11 17:28:32 +00:00
|
|
|
scheme = urlparse(request.url).scheme
|
2019-01-13 08:01:53 +00:00
|
|
|
|
|
|
|
is_proxied_http_request = (proxy and scheme != 'https')
|
|
|
|
using_socks_proxy = False
|
|
|
|
if proxy:
|
|
|
|
proxy_scheme = urlparse(proxy).scheme.lower()
|
|
|
|
using_socks_proxy = proxy_scheme.startswith('socks')
|
|
|
|
|
|
|
|
url = request.path_url
|
|
|
|
if is_proxied_http_request and not using_socks_proxy:
|
2016-02-23 06:06:55 +00:00
|
|
|
url = urldefragauth(request.url)
|
2013-10-11 17:28:32 +00:00
|
|
|
|
|
|
|
return url
|
|
|
|
|
|
|
|
def add_headers(self, request, **kwargs):
|
|
|
|
"""Add any headers needed by the connection. As of v2.0 this does
|
|
|
|
nothing by default, but is left for overriding by users that subclass
|
|
|
|
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
|
|
|
|
|
|
|
This should not be called from user code, and is only exposed for use
|
|
|
|
when subclassing the
|
|
|
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
|
|
|
|
|
|
|
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
|
|
|
|
:param kwargs: The keyword arguments from the call to send().
|
|
|
|
"""
|
|
|
|
pass
|
|
|
|
|
|
|
|
def proxy_headers(self, proxy):
|
|
|
|
"""Returns a dictionary of the headers to add to any request sent
|
|
|
|
through a proxy. This works with urllib3 magic to ensure that they are
|
|
|
|
correctly sent to the proxy, rather than in a tunnelled request if
|
|
|
|
CONNECT is being used.
|
|
|
|
|
|
|
|
This should not be called from user code, and is only exposed for use
|
|
|
|
when subclassing the
|
|
|
|
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
|
|
|
|
2019-01-13 08:01:53 +00:00
|
|
|
:param proxy: The url of the proxy being used for this request.
|
|
|
|
:rtype: dict
|
2013-10-11 17:28:32 +00:00
|
|
|
"""
|
|
|
|
headers = {}
|
|
|
|
username, password = get_auth_from_url(proxy)
|
|
|
|
|
2019-01-13 08:01:53 +00:00
|
|
|
if username:
|
2013-10-11 17:28:32 +00:00
|
|
|
headers['Proxy-Authorization'] = _basic_auth_str(username,
|
|
|
|
password)
|
|
|
|
|
|
|
|
return headers
|
|
|
|
|
|
|
|
    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            # urllib3 rejected the URL (e.g. missing host): surface it as a
            # requests-level InvalidURL so callers only see requests exceptions.
            raise InvalidURL(e, request=request)

        # Configure TLS verification / client certs on the pool before use.
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)

        # A body without a Content-Length header is sent with chunked
        # transfer-encoding via the low-level path below.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        # Normalize timeout into a urllib3 Timeout object.
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            # Caller supplied a ready-made urllib3 Timeout; use it as-is.
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,  # redirect handling belongs to Session
                    assert_same_host=False,
                    preload_content=False,  # body is streamed lazily
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Write each body chunk with chunked transfer-encoding
                    # framing: "<hex length>\r\n<chunk>\r\n", terminated by
                    # the zero-length chunk "0\r\n\r\n".
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            # Map urllib3's retry exhaustion to the most specific
            # requests exception based on the underlying reason.
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
|