# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import os.path
import socket

from pip9._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
from pip9._vendor.urllib3.response import HTTPResponse
from pip9._vendor.urllib3.util import Timeout as TimeoutSauce
from pip9._vendor.urllib3.util.retry import Retry
from pip9._vendor.urllib3.exceptions import ClosedPoolError
from pip9._vendor.urllib3.exceptions import ConnectTimeoutError
from pip9._vendor.urllib3.exceptions import HTTPError as _HTTPError
from pip9._vendor.urllib3.exceptions import MaxRetryError
from pip9._vendor.urllib3.exceptions import NewConnectionError
from pip9._vendor.urllib3.exceptions import ProxyError as _ProxyError
from pip9._vendor.urllib3.exceptions import ProtocolError
from pip9._vendor.urllib3.exceptions import ReadTimeoutError
from pip9._vendor.urllib3.exceptions import SSLError as _SSLError
from pip9._vendor.urllib3.exceptions import ResponseError

from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
                    select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError, InvalidSchema)
from .auth import _basic_auth_str

try:
    from pip9._vendor.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")

DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None


class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends a PreparedRequest object. Returns a Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError


class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
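    # A hedged sketch of the granular-retry option mentioned in the docstring:
    # pass a urllib3 ``Retry`` instance (already imported in this module)
    # instead of an integer. The values below are illustrative, not defaults:
    #
    #   >>> from pip9._vendor.urllib3.util.retry import Retry
    #   >>> retries = Retry(total=3, backoff_factor=0.5,
    #   ...                 status_forcelist=[500, 502, 503, 504])
    #   >>> s.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))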
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # We can't handle this by adding 'proxy_manager' to self.__attrs__,
        # because self.poolmanager uses a lambda function, which isn't
        # pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

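    # A minimal subclassing sketch: ``pool_kwargs`` is passed straight through
    # to urllib3's PoolManager, so a subclass can inject extra pool options
    # such as ``ssl_context``. The class name and the ``self.ssl_context``
    # attribute here are hypothetical:
    #
    #   class SSLContextAdapter(HTTPAdapter):
    #       def init_poolmanager(self, connections, maxsize,
    #                            block=DEFAULT_POOLBLOCK, **pool_kwargs):
    #           pool_kwargs['ssl_context'] = self.ssl_context
    #           super(SSLContextAdapter, self).init_poolmanager(
    #               connections, maxsize, block=block, **pool_kwargs)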
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager

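    # Caller-side sketch of the SOCKS branch above (proxy address is
    # illustrative): a Requests-style proxies mapping whose values use a
    # ``socks5://`` scheme is routed through SOCKSProxyManager:
    #
    #   >>> proxies = {'http': 'socks5://127.0.0.1:1080',
    #   ...            'https': 'socks5://127.0.0.1:1080'}
    #   >>> s.get('https://example.com', proxies=proxies)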
    def cert_verify(self, conn, url, verify, cert):
        """Verify an SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {0}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {0}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {0}".format(conn.key_file))

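    # How the ``verify``/``cert`` branches above are typically reached (paths
    # are illustrative): a string ``verify`` is treated as a CA bundle path,
    # and a 2-tuple ``cert`` is split into cert_file and key_file:
    #
    #   >>> s.get('https://example.com', verify='/etc/ssl/certs/ca-bundle.crt')
    #   >>> s.get('https://example.com',
    #   ...       cert=('/path/client.crt', '/path/client.key'))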
    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through an HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

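    # Sketch of the two outcomes above (``adapter`` is an HTTPAdapter instance
    # and ``prepared`` a hypothetical PreparedRequest for
    # http://example.com/get?x=1):
    #
    #   >>> adapter.request_url(prepared, proxies={})
    #   '/get?x=1'
    #   >>> adapter.request_url(prepared, proxies={'http': 'http://10.0.0.1:3128'})
    #   'http://example.com/get?x=1'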
    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The URL of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

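    # Sketch (credentials illustrative): userinfo embedded in the proxy URL
    # becomes a Basic Proxy-Authorization header; without userinfo the dict
    # stays empty:
    #
    #   >>> HTTPAdapter().proxy_headers('http://user:pass@10.10.1.10:3128')
    #   {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}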
    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends a PreparedRequest object. Returns a Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
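        # e.g. (illustrative): timeout=(3.05, 27) becomes
        # TimeoutSauce(connect=3.05, read=27), while timeout=5 applies 5
        # seconds to both the connect and the read phase.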

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request manually, with a chunked body, when the body is
            # present but no Content-Length header was set.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

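                    # The loop below frames the body using HTTP/1.1 chunked
                    # transfer encoding: each chunk is its length in hex, CRLF,
                    # the chunk data, CRLF; a zero-length chunk followed by a
                    # blank line terminates the body.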
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)