import os.path
import logging
import socket
from base64 import b64encode

from urllib3 import PoolManager, ProxyManager, proxy_from_url, Timeout
from urllib3.util.retry import Retry
from urllib3.util.ssl_ import (
    ssl, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION, DEFAULT_CIPHERS,
)
from urllib3.exceptions import SSLError as URLLib3SSLError
from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError
from urllib3.exceptions import ConnectTimeoutError as URLLib3ConnectTimeoutError
from urllib3.exceptions import NewConnectionError, ProtocolError, ProxyError
try:
    # Always import the original SSLContext, even if it has been patched
    from urllib3.contrib.pyopenssl import orig_util_SSLContext as SSLContext
except ImportError:
    from urllib3.util.ssl_ import SSLContext

import botocore.awsrequest
from botocore.vendored import six
from botocore.vendored.six.moves.urllib_parse import unquote
from botocore.compat import filter_ssl_warnings, urlparse
from botocore.exceptions import (
    ConnectionClosedError, EndpointConnectionError, HTTPClientError,
    ReadTimeoutError, ProxyConnectionError, ConnectTimeoutError, SSLError
)

filter_ssl_warnings()
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 60
MAX_POOL_CONNECTIONS = 10
DEFAULT_CA_BUNDLE = os.path.join(os.path.dirname(__file__), 'cacert.pem')

try:
    from certifi import where
except ImportError:
    def where():
        # certifi is not installed; fall back to the CA bundle that
        # ships alongside this module.
        return DEFAULT_CA_BUNDLE


def get_cert_path(verify):
    """Map a ``verify`` setting to a CA bundle path.

    :param verify: ``True`` to use the default CA bundle, or a path to a
        custom CA bundle (any value other than ``True`` is returned as-is).
    :return: The CA bundle path to use for certificate verification.
    """
    if verify is not True:
        return verify

    return where()


def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """ This function is a vendored version of the same function in urllib3

    We vendor this function to ensure that the SSL contexts we construct
    always use the std lib SSLContext instead of pyopenssl.
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue urllib3#309)
        options |= OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):
        # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:
        # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context


class ProxyConfiguration(object):
    """Represents a proxy configuration dictionary.

    This class represents a proxy configuration dictionary and provides utility
    functions to retrieve well structured proxy urls and proxy headers from the
    proxy configuration dictionary.
    """
    def __init__(self, proxies=None):
        if proxies is None:
            proxies = {}
        self._proxies = proxies

    def proxy_url_for(self, url):
        """Retrieves the corresponding proxy url for a given url. """
        parsed_url = urlparse(url)
        # Proxies are keyed by scheme ('http' / 'https'); no entry means
        # the request goes direct.
        proxy = self._proxies.get(parsed_url.scheme)
        if proxy:
            proxy = self._fix_proxy_url(proxy)
        return proxy

    def proxy_headers_for(self, proxy_url):
        """Retrieves the corresponding proxy headers for a given proxy url.
        """
        headers = {}
        username, password = self._get_auth_from_url(proxy_url)
        if username and password:
            basic_auth = self._construct_basic_auth(username, password)
            headers['Proxy-Authorization'] = basic_auth
        return headers

    def _fix_proxy_url(self, proxy_url):
        # Normalize shorthand proxy urls ('host:port' or '//host:port')
        # to a fully-qualified http url.
        if proxy_url.startswith('http:') or proxy_url.startswith('https:'):
            return proxy_url
        elif proxy_url.startswith('//'):
            return 'http:' + proxy_url
        else:
            return 'http://' + proxy_url

    def _construct_basic_auth(self, username, password):
        # Build an RFC 7617 Basic auth header value from the credentials.
        auth_str = '{0}:{1}'.format(username, password)
        encoded_str = b64encode(auth_str.encode('ascii')).strip().decode()
        return 'Basic {0}'.format(encoded_str)

    def _get_auth_from_url(self, url):
        parsed_url = urlparse(url)
        try:
            # Credentials are percent-encoded in the url; unquote raises
            # TypeError when username/password are absent (None).
            return unquote(parsed_url.username), unquote(parsed_url.password)
        except (AttributeError, TypeError):
            return None, None


class URLLib3Session(object):
    """A basic HTTP client that supports connection pooling and proxies.

    This class is inspired by requests.adapters.HTTPAdapter, but has been
    boiled down to meet the use cases needed by botocore. For the most part
    this class matches the functionality of HTTPAdapter in requests v2.7.0
    (the same as our vendored version). The only major difference of note is
    that we currently do not support sending chunked requests. While requests
    v2.7.0 implemented this themselves, later versions of urllib3 support this
    directly via a flag to urlopen so enabling it if needed should be trivial.
    """
    def __init__(self,
                 verify=True,
                 proxies=None,
                 timeout=None,
                 max_pool_connections=MAX_POOL_CONNECTIONS,
                 socket_options=None,
                 client_cert=None,
    ):
        self._verify = verify
        self._proxy_config = ProxyConfiguration(proxies=proxies)
        self._pool_classes_by_scheme = {
            'http': botocore.awsrequest.AWSHTTPConnectionPool,
            'https': botocore.awsrequest.AWSHTTPSConnectionPool,
        }
        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        if not isinstance(timeout, (int, float)):
            # A (connect, read) tuple maps onto urllib3's Timeout object.
            timeout = Timeout(connect=timeout[0], read=timeout[1])

        # client_cert may be a single path to a combined cert/key file, or
        # a (cert_file, key_file) tuple.
        self._cert_file = None
        self._key_file = None
        if isinstance(client_cert, str):
            self._cert_file = client_cert
        elif isinstance(client_cert, tuple):
            self._cert_file, self._key_file = client_cert

        self._timeout = timeout
        self._max_pool_connections = max_pool_connections
        self._socket_options = socket_options
        if socket_options is None:
            self._socket_options = []
        # Proxy managers are created lazily per proxy url; the plain pool
        # manager handles all direct (non-proxied) requests.
        self._proxy_managers = {}
        self._manager = PoolManager(**self._get_pool_manager_kwargs())
        self._manager.pool_classes_by_scheme = self._pool_classes_by_scheme

    def _get_pool_manager_kwargs(self, **extra_kwargs):
        pool_manager_kwargs = {
            'strict': True,
            'timeout': self._timeout,
            'maxsize': self._max_pool_connections,
            'ssl_context': self._get_ssl_context(),
            'socket_options': self._socket_options,
            'cert_file': self._cert_file,
            'key_file': self._key_file,
        }
        pool_manager_kwargs.update(**extra_kwargs)
        return pool_manager_kwargs

    def _get_ssl_context(self):
        return create_urllib3_context()

    def _get_proxy_manager(self, proxy_url):
        # Cache one ProxyManager per proxy url so pooled connections
        # are reused across requests.
        if proxy_url not in self._proxy_managers:
            proxy_headers = self._proxy_config.proxy_headers_for(proxy_url)
            proxy_manager_kwargs = self._get_pool_manager_kwargs(
                proxy_headers=proxy_headers)
            proxy_manager = proxy_from_url(proxy_url, **proxy_manager_kwargs)
            proxy_manager.pool_classes_by_scheme = self._pool_classes_by_scheme
            self._proxy_managers[proxy_url] = proxy_manager

        return self._proxy_managers[proxy_url]

    def _path_url(self, url):
        # Reduce a full url to its origin-form request target
        # (path plus optional query string).
        parsed_url = urlparse(url)
        path = parsed_url.path
        if not path:
            path = '/'
        if parsed_url.query:
            path = path + '?' + parsed_url.query
        return path

    def _setup_ssl_cert(self, conn, url, verify):
        if url.lower().startswith('https') and verify:
            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = get_cert_path(verify)
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

    def _get_connection_manager(self, url, proxy_url=None):
        if proxy_url:
            manager = self._get_proxy_manager(proxy_url)
        else:
            manager = self._manager
        return manager

    def _get_request_target(self, url, proxy_url):
        if proxy_url and url.startswith('http:'):
            # HTTP proxies expect the request_target to be the absolute url to
            # know which host to establish a connection to
            return url
        else:
            # otherwise just set the request target to the url path
            return self._path_url(url)

    def _chunked(self, headers):
        return headers.get('Transfer-Encoding', '') == 'chunked'

    def send(self, request):
        """Send an AWSPreparedRequest and return an AWSResponse.

        Translates urllib3/socket level failures into the corresponding
        botocore exceptions (SSLError, EndpointConnectionError,
        ProxyConnectionError, ConnectTimeoutError, ReadTimeoutError,
        ConnectionClosedError, HTTPClientError).
        """
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                # Retries are handled at a higher layer in botocore, so
                # disable urllib3's own retry machinery.
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(
                error=e,
                request=request,
                endpoint_url=request.url
            )
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)