1r"""HTTP/1.1 client library 2 3<intro stuff goes here> 4<other stuff, too> 5 6HTTPConnection goes through a number of "states", which define when a client 7may legally make another request or fetch the response for a particular 8request. This diagram details these state transitions: 9 10 (null) 11 | 12 | HTTPConnection() 13 v 14 Idle 15 | 16 | putrequest() 17 v 18 Request-started 19 | 20 | ( putheader() )* endheaders() 21 v 22 Request-sent 23 |\_____________________________ 24 | | getresponse() raises 25 | response = getresponse() | ConnectionError 26 v v 27 Unread-response Idle 28 [Response-headers-read] 29 |\____________________ 30 | | 31 | response.read() | putrequest() 32 v v 33 Idle Req-started-unread-response 34 ______/| 35 / | 36 response.read() | | ( putheader() )* endheaders() 37 v v 38 Request-started Req-sent-unread-response 39 | 40 | response.read() 41 v 42 Request-sent 43 44This diagram presents the following rules: 45 -- a second request may not be started until {response-headers-read} 46 -- a response [object] cannot be retrieved until {request-sent} 47 -- there is no differentiation between an unread response body and a 48 partially read response body 49 50Note: this enforcement is applied by the HTTPConnection class. The 51 HTTPResponse class does not enforce this state machine, which 52 implies sophisticated clients may accelerate the request/response 53 pipeline. Caution should be taken, though: accelerating the states 54 beyond the above pattern may imply knowledge of the server's 55 connection-close behavior for certain requests. For example, it 56 is impossible to tell whether the server will close the connection 57 UNTIL the response headers have been read; this means that further 58 requests cannot be placed into the pipeline until it is known that 59 the server will NOT be closing the connection. 
60 61Logical State __state __response 62------------- ------- ---------- 63Idle _CS_IDLE None 64Request-started _CS_REQ_STARTED None 65Request-sent _CS_REQ_SENT None 66Unread-response _CS_IDLE <response_class> 67Req-started-unread-response _CS_REQ_STARTED <response_class> 68Req-sent-unread-response _CS_REQ_SENT <response_class> 69""" 70 71import email.parser 72import email.message 73import errno 74import http 75import io 76import re 77import socket 78import collections.abc 79from urllib.parse import urlsplit 80 81# HTTPMessage, parse_headers(), and the HTTP status code constants are 82# intentionally omitted for simplicity 83__all__ = ["HTTPResponse", "HTTPConnection", 84 "HTTPException", "NotConnected", "UnknownProtocol", 85 "UnknownTransferEncoding", "UnimplementedFileMode", 86 "IncompleteRead", "InvalidURL", "ImproperConnectionState", 87 "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", 88 "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error", 89 "responses"] 90 91HTTP_PORT = 80 92HTTPS_PORT = 443 93 94_UNKNOWN = 'UNKNOWN' 95 96# connection states 97_CS_IDLE = 'Idle' 98_CS_REQ_STARTED = 'Request-started' 99_CS_REQ_SENT = 'Request-sent' 100 101 102# hack to maintain backwards compatibility 103globals().update(http.HTTPStatus.__members__) 104 105# another hack to maintain backwards compatibility 106# Mapping status codes to official W3C names 107responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()} 108 109# maximal amount of data to read at one time in _safe_read 110MAXAMOUNT = 1048576 111 112# maximal line length when calling readline(). 
113_MAXLINE = 65536 114_MAXHEADERS = 100 115 116# Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2) 117# 118# VCHAR = %x21-7E 119# obs-text = %x80-FF 120# header-field = field-name ":" OWS field-value OWS 121# field-name = token 122# field-value = *( field-content / obs-fold ) 123# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] 124# field-vchar = VCHAR / obs-text 125# 126# obs-fold = CRLF 1*( SP / HTAB ) 127# ; obsolete line folding 128# ; see Section 3.2.4 129 130# token = 1*tchar 131# 132# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" 133# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" 134# / DIGIT / ALPHA 135# ; any VCHAR, except delimiters 136# 137# VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1 138 139# the patterns for both name and value are more lenient than RFC 140# definitions to allow for backwards compatibility 141_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch 142_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search 143 144# These characters are not allowed within HTTP URL paths. 145# See https://tools.ietf.org/html/rfc3986#section-3.3 and the 146# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. 147# Prevents CVE-2019-9740. Includes control characters such as \r\n. 148# We don't restrict chars above \x7f as putrequest() limits us to ASCII. 149_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]') 150# Arguably only these _should_ allowed: 151# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") 152# We are more lenient for assumed real world compatibility purposes. 153 154# These characters are not allowed within HTTP method names 155# to prevent http header injection. 
156_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]') 157 158# We always set the Content-Length header for these methods because some 159# servers will otherwise respond with a 411 160_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} 161 162 163def _encode(data, name='data'): 164 """Call data.encode("latin-1") but show a better error message.""" 165 try: 166 return data.encode("latin-1") 167 except UnicodeEncodeError as err: 168 raise UnicodeEncodeError( 169 err.encoding, 170 err.object, 171 err.start, 172 err.end, 173 "%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') " 174 "if you want to send it encoded in UTF-8." % 175 (name.title(), data[err.start:err.end], name)) from None 176 177 178class HTTPMessage(email.message.Message): 179 # XXX The only usage of this method is in 180 # http.server.CGIHTTPRequestHandler. Maybe move the code there so 181 # that it doesn't need to be part of the public API. The API has 182 # never been defined so this could cause backwards compatibility 183 # issues. 184 185 def getallmatchingheaders(self, name): 186 """Find all header lines matching a given header name. 187 188 Look through the list of headers and find all lines matching a given 189 header name (and their continuation lines). A list of the lines is 190 returned, without interpretation. If the header does not occur, an 191 empty list is returned. If the header occurs multiple times, all 192 occurrences are returned. Case is not important in the header name. 193 194 """ 195 name = name.lower() + ':' 196 n = len(name) 197 lst = [] 198 hit = 0 199 for line in self.keys(): 200 if line[:n].lower() == name: 201 hit = 1 202 elif not line[:1].isspace(): 203 hit = 0 204 if hit: 205 lst.append(line) 206 return lst 207 208def _read_headers(fp): 209 """Reads potential header lines into a list from a file pointer. 210 211 Length of line is limited by _MAXLINE, and number of 212 headers is limited by _MAXHEADERS. 
213 """ 214 headers = [] 215 while True: 216 line = fp.readline(_MAXLINE + 1) 217 if len(line) > _MAXLINE: 218 raise LineTooLong("header line") 219 headers.append(line) 220 if len(headers) > _MAXHEADERS: 221 raise HTTPException("got more than %d headers" % _MAXHEADERS) 222 if line in (b'\r\n', b'\n', b''): 223 break 224 return headers 225 226def parse_headers(fp, _class=HTTPMessage): 227 """Parses only RFC2822 headers from a file pointer. 228 229 email Parser wants to see strings rather than bytes. 230 But a TextIOWrapper around self.rfile would buffer too many bytes 231 from the stream, bytes which we later need to read as bytes. 232 So we read the correct bytes here, as bytes, for email Parser 233 to parse. 234 235 """ 236 headers = _read_headers(fp) 237 hstring = b''.join(headers).decode('iso-8859-1') 238 return email.parser.Parser(_class=_class).parsestr(hstring) 239 240 241class HTTPResponse(io.BufferedIOBase): 242 243 # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. 244 245 # The bytes from the socket object are iso-8859-1 strings. 246 # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded 247 # text following RFC 2047. The basic status line parsing only 248 # accepts iso-8859-1. 249 250 def __init__(self, sock, debuglevel=0, method=None, url=None): 251 # If the response includes a content-length header, we need to 252 # make sure that the client doesn't read more than the 253 # specified number of bytes. If it does, it will block until 254 # the server times out and closes the connection. This will 255 # happen if a self.fp.read() is done (without a size) whether 256 # self.fp is buffered or not. So, no self.fp.read() by 257 # clients unless they know what they are doing. 258 self.fp = sock.makefile("rb") 259 self.debuglevel = debuglevel 260 self._method = method 261 262 # The HTTPResponse object is returned via urllib. The clients 263 # of http and urllib expect different attributes for the 264 # headers. 
    def _read_status(self):
        """Read and parse the status line, e.g. 'HTTP/1.1 200 OK'.

        Returns a (version, status, reason) tuple; *status* is an int in
        100..999 and *reason* may keep trailing whitespace (begin()
        strips it).  Raises LineTooLong, RemoteDisconnected or
        BadStatusLine on malformed input.
        """
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
            raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
        try:
            version, status, reason = line.split(None, 2)
        except ValueError:
            try:
                # status line without a reason phrase, e.g. "HTTP/1.1 200"
                version, status = line.split(None, 1)
                reason = ""
            except ValueError:
                # empty version will cause next test to fail.
                version = ""
        if not version.startswith("HTTP/"):
            self._close_conn()
            raise BadStatusLine(line)

        # The status code is a three-digit number
        try:
            status = int(status)
            if status < 100 or status > 999:
                raise BadStatusLine(line)
        except ValueError:
            raise BadStatusLine(line)
        return version, status, reason
299 version = "" 300 if not version.startswith("HTTP/"): 301 self._close_conn() 302 raise BadStatusLine(line) 303 304 # The status code is a three-digit number 305 try: 306 status = int(status) 307 if status < 100 or status > 999: 308 raise BadStatusLine(line) 309 except ValueError: 310 raise BadStatusLine(line) 311 return version, status, reason 312 313 def begin(self): 314 if self.headers is not None: 315 # we've already started reading the response 316 return 317 318 # read until we get a non-100 response 319 while True: 320 version, status, reason = self._read_status() 321 if status != CONTINUE: 322 break 323 # skip the header from the 100 response 324 skipped_headers = _read_headers(self.fp) 325 if self.debuglevel > 0: 326 print("headers:", skipped_headers) 327 del skipped_headers 328 329 self.code = self.status = status 330 self.reason = reason.strip() 331 if version in ("HTTP/1.0", "HTTP/0.9"): 332 # Some servers might still return "0.9", treat it as 1.0 anyway 333 self.version = 10 334 elif version.startswith("HTTP/1."): 335 self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 336 else: 337 raise UnknownProtocol(version) 338 339 self.headers = self.msg = parse_headers(self.fp) 340 341 if self.debuglevel > 0: 342 for hdr, val in self.headers.items(): 343 print("header:", hdr + ":", val) 344 345 # are we using the chunked-style of transfer encoding? 346 tr_enc = self.headers.get("transfer-encoding") 347 if tr_enc and tr_enc.lower() == "chunked": 348 self.chunked = True 349 self.chunk_left = None 350 else: 351 self.chunked = False 352 353 # will the connection close at the end of the response? 354 self.will_close = self._check_close() 355 356 # do we have a Content-Length? 
357 # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" 358 self.length = None 359 length = self.headers.get("content-length") 360 if length and not self.chunked: 361 try: 362 self.length = int(length) 363 except ValueError: 364 self.length = None 365 else: 366 if self.length < 0: # ignore nonsensical negative lengths 367 self.length = None 368 else: 369 self.length = None 370 371 # does the body have a fixed length? (of zero) 372 if (status == NO_CONTENT or status == NOT_MODIFIED or 373 100 <= status < 200 or # 1xx codes 374 self._method == "HEAD"): 375 self.length = 0 376 377 # if the connection remains open, and we aren't using chunked, and 378 # a content-length was not provided, then assume that the connection 379 # WILL close. 380 if (not self.will_close and 381 not self.chunked and 382 self.length is None): 383 self.will_close = True 384 385 def _check_close(self): 386 conn = self.headers.get("connection") 387 if self.version == 11: 388 # An HTTP/1.1 proxy is assumed to stay open unless 389 # explicitly closed. 390 if conn and "close" in conn.lower(): 391 return True 392 return False 393 394 # Some HTTP/1.0 implementations have support for persistent 395 # connections, using rules different than HTTP/1.1. 396 397 # For older HTTP, Keep-Alive indicates persistent connection. 398 if self.headers.get("keep-alive"): 399 return False 400 401 # At least Akamai returns a "Connection: Keep-Alive" header, 402 # which was supposed to be sent by the client. 403 if conn and "keep-alive" in conn.lower(): 404 return False 405 406 # Proxy-Connection is a netscape hack. 
407 pconn = self.headers.get("proxy-connection") 408 if pconn and "keep-alive" in pconn.lower(): 409 return False 410 411 # otherwise, assume it will close 412 return True 413 414 def _close_conn(self): 415 fp = self.fp 416 self.fp = None 417 fp.close() 418 419 def close(self): 420 try: 421 super().close() # set "closed" flag 422 finally: 423 if self.fp: 424 self._close_conn() 425 426 # These implementations are for the benefit of io.BufferedReader. 427 428 # XXX This class should probably be revised to act more like 429 # the "raw stream" that BufferedReader expects. 430 431 def flush(self): 432 super().flush() 433 if self.fp: 434 self.fp.flush() 435 436 def readable(self): 437 """Always returns True""" 438 return True 439 440 # End of "raw stream" methods 441 442 def isclosed(self): 443 """True if the connection is closed.""" 444 # NOTE: it is possible that we will not ever call self.close(). This 445 # case occurs when will_close is TRUE, length is None, and we 446 # read up to the last byte, but NOT past it. 447 # 448 # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be 449 # called, meaning self.isclosed() is meaningful. 
    def read(self, amt=None):
        """Read and return the body, or up to *amt* bytes of it.

        Returns b"" once the response is exhausted, closed, or the
        request method was HEAD (which never has a body).
        """
        if self.fp is None:
            return b""

        if self._method == "HEAD":
            self._close_conn()
            return b""

        if amt is not None:
            # Amount is given, implement using readinto
            b = bytearray(amt)
            n = self.readinto(b)
            return memoryview(b)[:n].tobytes()
        else:
            # Amount is not given (unbounded read) so we must check self.length
            # and self.chunked

            if self.chunked:
                return self._readall_chunked()

            if self.length is None:
                s = self.fp.read()
            else:
                try:
                    s = self._safe_read(self.length)
                except IncompleteRead:
                    self._close_conn()
                    raise
                self.length = 0
            self._close_conn()        # we read everything
            return s

    def readinto(self, b):
        """Read up to len(b) bytes into bytearray b and return the number
        of bytes read.
        """
        if self.fp is None:
            return 0

        if self._method == "HEAD":
            self._close_conn()
            return 0

        if self.chunked:
            return self._readinto_chunked(b)

        if self.length is not None:
            if len(b) > self.length:
                # clip the read to the "end of response"
                b = memoryview(b)[0:self.length]

        # we do not use _safe_read() here because this may be a .will_close
        # connection, and the user is reading more bytes than will be provided
        # (for example, reading in 1k chunks)
        n = self.fp.readinto(b)
        if not n and b:
            # Ideally, we would raise IncompleteRead if the content-length
            # wasn't satisfied, but it might break compatibility.
            self._close_conn()
        elif self.length is not None:
            # track how much of the declared Content-Length remains
            self.length -= n
            if not self.length:
                self._close_conn()
        return n
    def _read_next_chunk_size(self):
        """Read a chunk-size line and return its value as an int.

        Raises LineTooLong on an oversized line; on a non-hex size the
        connection is closed (protocol sync is lost) and ValueError
        propagates.
        """
        # Read the next chunk size from the file
        line = self.fp.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise LineTooLong("chunk size")
        i = line.find(b";")
        if i >= 0:
            line = line[:i]     # strip chunk-extensions
        try:
            return int(line, 16)
        except ValueError:
            # close the connection as protocol synchronisation is
            # probably lost
            self._close_conn()
            raise

    def _read_and_discard_trailer(self):
        """Consume trailer lines after the last chunk, up to the CRLF."""
        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong("trailer line")
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line in (b'\r\n', b'\n', b''):
                break

    def _get_chunk_left(self):
        """Return bytes left in the current chunk, reading a new chunk
        header if necessary; None once the final chunk is consumed.
        """
        # return self.chunk_left, reading a new chunk if necessary.
        # chunk_left == 0: at the end of the current chunk, need to close it
        # chunk_left == None: No current chunk, should read next.
        # This function returns non-zero or None if the last chunk has
        # been read.
        chunk_left = self.chunk_left
        if not chunk_left: # Can be 0 or None
            if chunk_left is not None:
                # We are at the end of chunk, discard chunk end
                self._safe_read(2)  # toss the CRLF at the end of the chunk
            try:
                chunk_left = self._read_next_chunk_size()
            except ValueError:
                raise IncompleteRead(b'')
            if chunk_left == 0:
                # last chunk: 1*("0") [ chunk-extension ] CRLF
                self._read_and_discard_trailer()
                # we read everything; close the "file"
                self._close_conn()
                chunk_left = None
            self.chunk_left = chunk_left
        return chunk_left
    def _readall_chunked(self):
        """Read and return the remainder of a chunked body as bytes."""
        assert self.chunked != _UNKNOWN
        value = []
        try:
            while True:
                chunk_left = self._get_chunk_left()
                if chunk_left is None:
                    break
                value.append(self._safe_read(chunk_left))
                self.chunk_left = 0
            return b''.join(value)
        except IncompleteRead:
            # re-raise with whatever was successfully read so far
            raise IncompleteRead(b''.join(value))

    def _readinto_chunked(self, b):
        """Read a chunked body into buffer *b*; return the byte count."""
        assert self.chunked != _UNKNOWN
        total_bytes = 0
        mvb = memoryview(b)
        try:
            while True:
                chunk_left = self._get_chunk_left()
                if chunk_left is None:
                    return total_bytes

                if len(mvb) <= chunk_left:
                    # buffer is the limiting factor: fill it and record
                    # how much of the chunk remains unread
                    n = self._safe_readinto(mvb)
                    self.chunk_left = chunk_left - n
                    return total_bytes + n

                # chunk is smaller than the remaining buffer: consume it
                # entirely and advance the buffer view
                temp_mvb = mvb[:chunk_left]
                n = self._safe_readinto(temp_mvb)
                mvb = mvb[n:]
                total_bytes += n
                self.chunk_left = 0

        except IncompleteRead:
            raise IncompleteRead(bytes(b[0:total_bytes]))

    def _safe_read(self, amt):
        """Read the number of bytes requested, compensating for partial reads.

        Normally, we have a blocking socket, but a read() can be interrupted
        by a signal (resulting in a partial read).

        Note that we cannot distinguish between EOF and an interrupt when zero
        bytes have been read. IncompleteRead() will be raised in this
        situation.

        This function should be used when <amt> bytes "should" be present for
        reading. If the bytes are truly not available (due to EOF), then the
        IncompleteRead exception can be used to detect the problem.
        """
        s = []
        while amt > 0:
            # bound each read by MAXAMOUNT to cap peak memory per call
            chunk = self.fp.read(min(amt, MAXAMOUNT))
            if not chunk:
                raise IncompleteRead(b''.join(s), amt)
            s.append(chunk)
            amt -= len(chunk)
        return b"".join(s)
619 620 This function should be used when <amt> bytes "should" be present for 621 reading. If the bytes are truly not available (due to EOF), then the 622 IncompleteRead exception can be used to detect the problem. 623 """ 624 s = [] 625 while amt > 0: 626 chunk = self.fp.read(min(amt, MAXAMOUNT)) 627 if not chunk: 628 raise IncompleteRead(b''.join(s), amt) 629 s.append(chunk) 630 amt -= len(chunk) 631 return b"".join(s) 632 633 def _safe_readinto(self, b): 634 """Same as _safe_read, but for reading into a buffer.""" 635 total_bytes = 0 636 mvb = memoryview(b) 637 while total_bytes < len(b): 638 if MAXAMOUNT < len(mvb): 639 temp_mvb = mvb[0:MAXAMOUNT] 640 n = self.fp.readinto(temp_mvb) 641 else: 642 n = self.fp.readinto(mvb) 643 if not n: 644 raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) 645 mvb = mvb[n:] 646 total_bytes += n 647 return total_bytes 648 649 def read1(self, n=-1): 650 """Read with at most one underlying system call. If at least one 651 byte is buffered, return that instead. 
652 """ 653 if self.fp is None or self._method == "HEAD": 654 return b"" 655 if self.chunked: 656 return self._read1_chunked(n) 657 if self.length is not None and (n < 0 or n > self.length): 658 n = self.length 659 result = self.fp.read1(n) 660 if not result and n: 661 self._close_conn() 662 elif self.length is not None: 663 self.length -= len(result) 664 return result 665 666 def peek(self, n=-1): 667 # Having this enables IOBase.readline() to read more than one 668 # byte at a time 669 if self.fp is None or self._method == "HEAD": 670 return b"" 671 if self.chunked: 672 return self._peek_chunked(n) 673 return self.fp.peek(n) 674 675 def readline(self, limit=-1): 676 if self.fp is None or self._method == "HEAD": 677 return b"" 678 if self.chunked: 679 # Fallback to IOBase readline which uses peek() and read() 680 return super().readline(limit) 681 if self.length is not None and (limit < 0 or limit > self.length): 682 limit = self.length 683 result = self.fp.readline(limit) 684 if not result and limit: 685 self._close_conn() 686 elif self.length is not None: 687 self.length -= len(result) 688 return result 689 690 def _read1_chunked(self, n): 691 # Strictly speaking, _get_chunk_left() may cause more than one read, 692 # but that is ok, since that is to satisfy the chunked protocol. 693 chunk_left = self._get_chunk_left() 694 if chunk_left is None or n == 0: 695 return b'' 696 if not (0 <= n <= chunk_left): 697 n = chunk_left # if n is negative or larger than chunk_left 698 read = self.fp.read1(n) 699 self.chunk_left -= len(read) 700 if not read: 701 raise IncompleteRead(b"") 702 return read 703 704 def _peek_chunked(self, n): 705 # Strictly speaking, _get_chunk_left() may cause more than one read, 706 # but that is ok, since that is to satisfy the chunked protocol. 
707 try: 708 chunk_left = self._get_chunk_left() 709 except IncompleteRead: 710 return b'' # peek doesn't worry about protocol 711 if chunk_left is None: 712 return b'' # eof 713 # peek is allowed to return more than requested. Just request the 714 # entire chunk, and truncate what we get. 715 return self.fp.peek(chunk_left)[:chunk_left] 716 717 def fileno(self): 718 return self.fp.fileno() 719 720 def getheader(self, name, default=None): 721 '''Returns the value of the header matching *name*. 722 723 If there are multiple matching headers, the values are 724 combined into a single string separated by commas and spaces. 725 726 If no matching header is found, returns *default* or None if 727 the *default* is not specified. 728 729 If the headers are unknown, raises http.client.ResponseNotReady. 730 731 ''' 732 if self.headers is None: 733 raise ResponseNotReady() 734 headers = self.headers.get_all(name) or default 735 if isinstance(headers, str) or not hasattr(headers, '__iter__'): 736 return headers 737 else: 738 return ', '.join(headers) 739 740 def getheaders(self): 741 """Return list of (header, value) tuples.""" 742 if self.headers is None: 743 raise ResponseNotReady() 744 return list(self.headers.items()) 745 746 # We override IOBase.__iter__ so that it doesn't check for closed-ness 747 748 def __iter__(self): 749 return self 750 751 # For compatibility with old-style urllib responses. 752 753 def info(self): 754 '''Returns an instance of the class mimetools.Message containing 755 meta-information associated with the URL. 756 757 When the method is HTTP, these headers are those returned by 758 the server at the head of the retrieved HTML page (including 759 Content-Length and Content-Type). 760 761 When the method is FTP, a Content-Length header will be 762 present if (as is now usual) the server passed back a file 763 length in response to the FTP retrieval request. A 764 Content-Type header will be present if the MIME type can be 765 guessed. 
766 767 When the method is local-file, returned headers will include 768 a Date representing the file's last-modified time, a 769 Content-Length giving file size, and a Content-Type 770 containing a guess at the file's type. See also the 771 description of the mimetools module. 772 773 ''' 774 return self.headers 775 776 def geturl(self): 777 '''Return the real URL of the page. 778 779 In some cases, the HTTP server redirects a client to another 780 URL. The urlopen() function handles this transparently, but in 781 some cases the caller needs to know which URL the client was 782 redirected to. The geturl() method can be used to get at this 783 redirected URL. 784 785 ''' 786 return self.url 787 788 def getcode(self): 789 '''Return the HTTP status code that was sent with the response, 790 or None if the URL is not an HTTP URL. 791 792 ''' 793 return self.status 794 795class HTTPConnection: 796 797 _http_vsn = 11 798 _http_vsn_str = 'HTTP/1.1' 799 800 response_class = HTTPResponse 801 default_port = HTTP_PORT 802 auto_open = 1 803 debuglevel = 0 804 805 @staticmethod 806 def _is_textIO(stream): 807 """Test whether a file-like object is a text or a binary stream. 808 """ 809 return isinstance(stream, io.TextIOBase) 810 811 @staticmethod 812 def _get_content_length(body, method): 813 """Get the content-length based on the body. 814 815 If the body is None, we set Content-Length: 0 for methods that expect 816 a body (RFC 7230, Section 3.3.2). We also set the Content-Length for 817 any method if the body is a str or bytes-like object and not a file. 818 """ 819 if body is None: 820 # do an explicit check for not None here to distinguish 821 # between unset and set but empty 822 if method.upper() in _METHODS_EXPECTING_BODY: 823 return 0 824 else: 825 return None 826 827 if hasattr(body, 'read'): 828 # file-like object. 829 return None 830 831 try: 832 # does it implement the buffer protocol (bytes, bytearray, array)? 
833 mv = memoryview(body) 834 return mv.nbytes 835 except TypeError: 836 pass 837 838 if isinstance(body, str): 839 return len(body) 840 841 return None 842 843 def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 844 source_address=None, blocksize=8192): 845 self.timeout = timeout 846 self.source_address = source_address 847 self.blocksize = blocksize 848 self.sock = None 849 self._buffer = [] 850 self.__response = None 851 self.__state = _CS_IDLE 852 self._method = None 853 self._tunnel_host = None 854 self._tunnel_port = None 855 self._tunnel_headers = {} 856 857 (self.host, self.port) = self._get_hostport(host, port) 858 859 self._validate_host(self.host) 860 861 # This is stored as an instance variable to allow unit 862 # tests to replace it with a suitable mockup 863 self._create_connection = socket.create_connection 864 865 def set_tunnel(self, host, port=None, headers=None): 866 """Set up host and port for HTTP CONNECT tunnelling. 867 868 In a connection that uses HTTP CONNECT tunneling, the host passed to the 869 constructor is used as a proxy server that relays all communication to 870 the endpoint passed to `set_tunnel`. This done by sending an HTTP 871 CONNECT request to the proxy server when the connection is established. 872 873 This method must be called before the HTTP connection has been 874 established. 875 876 The headers argument should be a mapping of extra HTTP headers to send 877 with the CONNECT request. 878 """ 879 880 if self.sock: 881 raise RuntimeError("Can't set up tunnel for established connection") 882 883 self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) 884 if headers: 885 self._tunnel_headers = headers 886 else: 887 self._tunnel_headers.clear() 888 889 def _get_hostport(self, host, port): 890 if port is None: 891 i = host.rfind(':') 892 j = host.rfind(']') # ipv6 addresses have [...] 
    def set_debuglevel(self, level):
        # Debug output level; values > 0 print request/response traffic.
        self.debuglevel = level

    def _tunnel(self):
        """Send a CONNECT request to the proxy and consume its response.

        Raises OSError if the proxy refuses the tunnel; the proxy's
        response headers are read and discarded.
        """
        connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (
            self._tunnel_host.encode("ascii"), self._tunnel_port)
        headers = [connect]
        for header, value in self._tunnel_headers.items():
            headers.append(f"{header}: {value}\r\n".encode("latin-1"))
        headers.append(b"\r\n")
        # Making a single send() call instead of one per line encourages
        # the host OS to use a more optimal packet size instead of
        # potentially emitting a series of small packets.
        self.send(b"".join(headers))
        del headers

        response = self.response_class(self.sock, method=self._method)
        (version, code, message) = response._read_status()

        if code != http.HTTPStatus.OK:
            self.close()
            raise OSError(f"Tunnel connection failed: {code} {message.strip()}")
        # discard the proxy's response headers up to the blank line
        while True:
            line = response.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong("header line")
            if not line:
                # for sites which EOF without sending a trailer
                break
            if line in (b'\r\n', b'\n', b''):
                break

            if self.debuglevel > 0:
                print('header:', line.decode())

    def connect(self):
        """Connect to the host and port specified in __init__."""
        self.sock = self._create_connection(
            (self.host,self.port), self.timeout, self.source_address)
        # Might fail in OSs that don't implement TCP_NODELAY
        try:
            self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        except OSError as e:
            if e.errno != errno.ENOPROTOOPT:
                raise

        if self._tunnel_host:
            self._tunnel()
    def close(self):
        """Close the connection to the HTTP server."""
        self.__state = _CS_IDLE
        try:
            sock = self.sock
            if sock:
                self.sock = None
                sock.close()   # close it manually... there may be other refs
        finally:
            response = self.__response
            if response:
                self.__response = None
                response.close()

    def send(self, data):
        """Send `data' to the server.
        ``data`` can be a string object, a bytes object, an array object, a
        file-like object that supports a .read() method, or an iterable object.
        """
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()

        if self.debuglevel > 0:
            print("send:", repr(data))
        if hasattr(data, "read") :
            # file-like object: stream it out in blocksize pieces
            if self.debuglevel > 0:
                print("sendIng a read()able")
            encode = self._is_textIO(data)
            if encode and self.debuglevel > 0:
                print("encoding file using iso-8859-1")
            while 1:
                datablock = data.read(self.blocksize)
                if not datablock:
                    break
                if encode:
                    # text streams are sent as iso-8859-1 bytes
                    datablock = datablock.encode("iso-8859-1")
                self.sock.sendall(datablock)
            return
        try:
            self.sock.sendall(data)
        except TypeError:
            # not bytes-like: maybe an iterable of bytes-like chunks
            if isinstance(data, collections.abc.Iterable):
                for d in data:
                    self.sock.sendall(d)
            else:
                raise TypeError("data should be a bytes-like object "
                                "or an iterable, got %r" % type(data))

    def _output(self, s):
        """Add a line of output to the current request buffer.

        Assumes that the line does *not* end with \\r\\n.
        """
        self._buffer.append(s)
1014 """ 1015 self._buffer.append(s) 1016 1017 def _read_readable(self, readable): 1018 if self.debuglevel > 0: 1019 print("sendIng a read()able") 1020 encode = self._is_textIO(readable) 1021 if encode and self.debuglevel > 0: 1022 print("encoding file using iso-8859-1") 1023 while True: 1024 datablock = readable.read(self.blocksize) 1025 if not datablock: 1026 break 1027 if encode: 1028 datablock = datablock.encode("iso-8859-1") 1029 yield datablock 1030 1031 def _send_output(self, message_body=None, encode_chunked=False): 1032 """Send the currently buffered request and clear the buffer. 1033 1034 Appends an extra \\r\\n to the buffer. 1035 A message_body may be specified, to be appended to the request. 1036 """ 1037 self._buffer.extend((b"", b"")) 1038 msg = b"\r\n".join(self._buffer) 1039 del self._buffer[:] 1040 self.send(msg) 1041 1042 if message_body is not None: 1043 1044 # create a consistent interface to message_body 1045 if hasattr(message_body, 'read'): 1046 # Let file-like take precedence over byte-like. This 1047 # is needed to allow the current position of mmap'ed 1048 # files to be taken into account. 1049 chunks = self._read_readable(message_body) 1050 else: 1051 try: 1052 # this is solely to check to see if message_body 1053 # implements the buffer API. it /would/ be easier 1054 # to capture if PyObject_CheckBuffer was exposed 1055 # to Python. 
1056 memoryview(message_body) 1057 except TypeError: 1058 try: 1059 chunks = iter(message_body) 1060 except TypeError: 1061 raise TypeError("message_body should be a bytes-like " 1062 "object or an iterable, got %r" 1063 % type(message_body)) 1064 else: 1065 # the object implements the buffer interface and 1066 # can be passed directly into socket methods 1067 chunks = (message_body,) 1068 1069 for chunk in chunks: 1070 if not chunk: 1071 if self.debuglevel > 0: 1072 print('Zero length chunk ignored') 1073 continue 1074 1075 if encode_chunked and self._http_vsn == 11: 1076 # chunked encoding 1077 chunk = f'{len(chunk):X}\r\n'.encode('ascii') + chunk \ 1078 + b'\r\n' 1079 self.send(chunk) 1080 1081 if encode_chunked and self._http_vsn == 11: 1082 # end chunked transfer 1083 self.send(b'0\r\n\r\n') 1084 1085 def putrequest(self, method, url, skip_host=False, 1086 skip_accept_encoding=False): 1087 """Send a request to the server. 1088 1089 `method' specifies an HTTP request method, e.g. 'GET'. 1090 `url' specifies the object being requested, e.g. '/index.html'. 1091 `skip_host' if True does not add automatically a 'Host:' header 1092 `skip_accept_encoding' if True does not add automatically an 1093 'Accept-Encoding:' header 1094 """ 1095 1096 # if a prior response has been completed, then forget about it. 1097 if self.__response and self.__response.isclosed(): 1098 self.__response = None 1099 1100 1101 # in certain cases, we cannot issue another request on this connection. 1102 # this occurs when: 1103 # 1) we are in the process of sending a request. (_CS_REQ_STARTED) 1104 # 2) a response to a previous request has signalled that it is going 1105 # to close the connection upon completion. 1106 # 3) the headers for the previous response have not been read, thus 1107 # we cannot determine whether point (2) is true. (_CS_REQ_SENT) 1108 # 1109 # if there is no prior response, then we can request at will. 
1110 # 1111 # if point (2) is true, then we will have passed the socket to the 1112 # response (effectively meaning, "there is no prior response"), and 1113 # will open a new one when a new request is made. 1114 # 1115 # Note: if a prior response exists, then we *can* start a new request. 1116 # We are not allowed to begin fetching the response to this new 1117 # request, however, until that prior response is complete. 1118 # 1119 if self.__state == _CS_IDLE: 1120 self.__state = _CS_REQ_STARTED 1121 else: 1122 raise CannotSendRequest(self.__state) 1123 1124 self._validate_method(method) 1125 1126 # Save the method for use later in the response phase 1127 self._method = method 1128 1129 url = url or '/' 1130 self._validate_path(url) 1131 1132 request = '%s %s %s' % (method, url, self._http_vsn_str) 1133 1134 self._output(self._encode_request(request)) 1135 1136 if self._http_vsn == 11: 1137 # Issue some standard headers for better HTTP/1.1 compliance 1138 1139 if not skip_host: 1140 # this header is issued *only* for HTTP/1.1 1141 # connections. more specifically, this means it is 1142 # only issued when the client uses the new 1143 # HTTPConnection() class. backwards-compat clients 1144 # will be using HTTP/1.0 and those clients may be 1145 # issuing this header themselves. we should NOT issue 1146 # it twice; some web servers (such as Apache) barf 1147 # when they see two Host: headers 1148 1149 # If we need a non-standard port,include it in the 1150 # header. If the request is going through a proxy, 1151 # but the host of the actual URL, not the host of the 1152 # proxy. 
1153 1154 netloc = '' 1155 if url.startswith('http'): 1156 nil, netloc, nil, nil, nil = urlsplit(url) 1157 1158 if netloc: 1159 try: 1160 netloc_enc = netloc.encode("ascii") 1161 except UnicodeEncodeError: 1162 netloc_enc = netloc.encode("idna") 1163 self.putheader('Host', netloc_enc) 1164 else: 1165 if self._tunnel_host: 1166 host = self._tunnel_host 1167 port = self._tunnel_port 1168 else: 1169 host = self.host 1170 port = self.port 1171 1172 try: 1173 host_enc = host.encode("ascii") 1174 except UnicodeEncodeError: 1175 host_enc = host.encode("idna") 1176 1177 # As per RFC 273, IPv6 address should be wrapped with [] 1178 # when used as Host header 1179 1180 if host.find(':') >= 0: 1181 host_enc = b'[' + host_enc + b']' 1182 1183 if port == self.default_port: 1184 self.putheader('Host', host_enc) 1185 else: 1186 host_enc = host_enc.decode("ascii") 1187 self.putheader('Host', "%s:%s" % (host_enc, port)) 1188 1189 # note: we are assuming that clients will not attempt to set these 1190 # headers since *this* library must deal with the 1191 # consequences. this also means that when the supporting 1192 # libraries are updated to recognize other forms, then this 1193 # code should be changed (removed or updated). 1194 1195 # we only want a Content-Encoding of "identity" since we don't 1196 # support encodings such as x-gzip or x-deflate. 1197 if not skip_accept_encoding: 1198 self.putheader('Accept-Encoding', 'identity') 1199 1200 # we can accept "chunked" Transfer-Encodings, but no others 1201 # NOTE: no TE header implies *only* "chunked" 1202 #self.putheader('TE', 'chunked') 1203 1204 # if TE is supplied in the header, then it must appear in a 1205 # Connection header. 1206 #self.putheader('Connection', 'TE') 1207 1208 else: 1209 # For HTTP/1.0, the server will assume "not chunked" 1210 pass 1211 1212 def _encode_request(self, request): 1213 # ASCII also helps prevent CVE-2019-9740. 
1214 return request.encode('ascii') 1215 1216 def _validate_method(self, method): 1217 """Validate a method name for putrequest.""" 1218 # prevent http header injection 1219 match = _contains_disallowed_method_pchar_re.search(method) 1220 if match: 1221 raise ValueError( 1222 f"method can't contain control characters. {method!r} " 1223 f"(found at least {match.group()!r})") 1224 1225 def _validate_path(self, url): 1226 """Validate a url for putrequest.""" 1227 # Prevent CVE-2019-9740. 1228 match = _contains_disallowed_url_pchar_re.search(url) 1229 if match: 1230 raise InvalidURL(f"URL can't contain control characters. {url!r} " 1231 f"(found at least {match.group()!r})") 1232 1233 def _validate_host(self, host): 1234 """Validate a host so it doesn't contain control characters.""" 1235 # Prevent CVE-2019-18348. 1236 match = _contains_disallowed_url_pchar_re.search(host) 1237 if match: 1238 raise InvalidURL(f"URL can't contain control characters. {host!r} " 1239 f"(found at least {match.group()!r})") 1240 1241 def putheader(self, header, *values): 1242 """Send a request header line to the server. 
1243 1244 For example: h.putheader('Accept', 'text/html') 1245 """ 1246 if self.__state != _CS_REQ_STARTED: 1247 raise CannotSendHeader() 1248 1249 if hasattr(header, 'encode'): 1250 header = header.encode('ascii') 1251 1252 if not _is_legal_header_name(header): 1253 raise ValueError('Invalid header name %r' % (header,)) 1254 1255 values = list(values) 1256 for i, one_value in enumerate(values): 1257 if hasattr(one_value, 'encode'): 1258 values[i] = one_value.encode('latin-1') 1259 elif isinstance(one_value, int): 1260 values[i] = str(one_value).encode('ascii') 1261 1262 if _is_illegal_header_value(values[i]): 1263 raise ValueError('Invalid header value %r' % (values[i],)) 1264 1265 value = b'\r\n\t'.join(values) 1266 header = header + b': ' + value 1267 self._output(header) 1268 1269 def endheaders(self, message_body=None, *, encode_chunked=False): 1270 """Indicate that the last header line has been sent to the server. 1271 1272 This method sends the request to the server. The optional message_body 1273 argument can be used to pass a message body associated with the 1274 request. 1275 """ 1276 if self.__state == _CS_REQ_STARTED: 1277 self.__state = _CS_REQ_SENT 1278 else: 1279 raise CannotSendHeader() 1280 self._send_output(message_body, encode_chunked=encode_chunked) 1281 1282 def request(self, method, url, body=None, headers={}, *, 1283 encode_chunked=False): 1284 """Send a complete request to the server.""" 1285 self._send_request(method, url, body, headers, encode_chunked) 1286 1287 def _send_request(self, method, url, body, headers, encode_chunked): 1288 # Honor explicitly requested Host: and Accept-Encoding: headers. 
1289 header_names = frozenset(k.lower() for k in headers) 1290 skips = {} 1291 if 'host' in header_names: 1292 skips['skip_host'] = 1 1293 if 'accept-encoding' in header_names: 1294 skips['skip_accept_encoding'] = 1 1295 1296 self.putrequest(method, url, **skips) 1297 1298 # chunked encoding will happen if HTTP/1.1 is used and either 1299 # the caller passes encode_chunked=True or the following 1300 # conditions hold: 1301 # 1. content-length has not been explicitly set 1302 # 2. the body is a file or iterable, but not a str or bytes-like 1303 # 3. Transfer-Encoding has NOT been explicitly set by the caller 1304 1305 if 'content-length' not in header_names: 1306 # only chunk body if not explicitly set for backwards 1307 # compatibility, assuming the client code is already handling the 1308 # chunking 1309 if 'transfer-encoding' not in header_names: 1310 # if content-length cannot be automatically determined, fall 1311 # back to chunked encoding 1312 encode_chunked = False 1313 content_length = self._get_content_length(body, method) 1314 if content_length is None: 1315 if body is not None: 1316 if self.debuglevel > 0: 1317 print('Unable to determine size of %r' % body) 1318 encode_chunked = True 1319 self.putheader('Transfer-Encoding', 'chunked') 1320 else: 1321 self.putheader('Content-Length', str(content_length)) 1322 else: 1323 encode_chunked = False 1324 1325 for hdr, value in headers.items(): 1326 self.putheader(hdr, value) 1327 if isinstance(body, str): 1328 # RFC 2616 Section 3.7.1 says that text default has a 1329 # default charset of iso-8859-1. 1330 body = _encode(body, 'body') 1331 self.endheaders(body, encode_chunked=encode_chunked) 1332 1333 def getresponse(self): 1334 """Get the response from the server. 1335 1336 If the HTTPConnection is in the correct state, returns an 1337 instance of HTTPResponse or of whatever object is returned by 1338 the response_class variable. 
1339 1340 If a request has not been sent or if a previous response has 1341 not be handled, ResponseNotReady is raised. If the HTTP 1342 response indicates that the connection should be closed, then 1343 it will be closed before the response is returned. When the 1344 connection is closed, the underlying socket is closed. 1345 """ 1346 1347 # if a prior response has been completed, then forget about it. 1348 if self.__response and self.__response.isclosed(): 1349 self.__response = None 1350 1351 # if a prior response exists, then it must be completed (otherwise, we 1352 # cannot read this response's header to determine the connection-close 1353 # behavior) 1354 # 1355 # note: if a prior response existed, but was connection-close, then the 1356 # socket and response were made independent of this HTTPConnection 1357 # object since a new request requires that we open a whole new 1358 # connection 1359 # 1360 # this means the prior response had one of two states: 1361 # 1) will_close: this connection was reset and the prior socket and 1362 # response operate independently 1363 # 2) persistent: the response was retained and we await its 1364 # isclosed() status to become true. 
1365 # 1366 if self.__state != _CS_REQ_SENT or self.__response: 1367 raise ResponseNotReady(self.__state) 1368 1369 if self.debuglevel > 0: 1370 response = self.response_class(self.sock, self.debuglevel, 1371 method=self._method) 1372 else: 1373 response = self.response_class(self.sock, method=self._method) 1374 1375 try: 1376 try: 1377 response.begin() 1378 except ConnectionError: 1379 self.close() 1380 raise 1381 assert response.will_close != _UNKNOWN 1382 self.__state = _CS_IDLE 1383 1384 if response.will_close: 1385 # this effectively passes the connection to the response 1386 self.close() 1387 else: 1388 # remember this, so we can tell when it is complete 1389 self.__response = response 1390 1391 return response 1392 except: 1393 response.close() 1394 raise 1395 1396try: 1397 import ssl 1398except ImportError: 1399 pass 1400else: 1401 class HTTPSConnection(HTTPConnection): 1402 "This class allows communication via SSL." 1403 1404 default_port = HTTPS_PORT 1405 1406 # XXX Should key_file and cert_file be deprecated in favour of context? 
1407 1408 def __init__(self, host, port=None, key_file=None, cert_file=None, 1409 timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 1410 source_address=None, *, context=None, 1411 check_hostname=None, blocksize=8192): 1412 super(HTTPSConnection, self).__init__(host, port, timeout, 1413 source_address, 1414 blocksize=blocksize) 1415 if (key_file is not None or cert_file is not None or 1416 check_hostname is not None): 1417 import warnings 1418 warnings.warn("key_file, cert_file and check_hostname are " 1419 "deprecated, use a custom context instead.", 1420 DeprecationWarning, 2) 1421 self.key_file = key_file 1422 self.cert_file = cert_file 1423 if context is None: 1424 context = ssl._create_default_https_context() 1425 # enable PHA for TLS 1.3 connections if available 1426 if context.post_handshake_auth is not None: 1427 context.post_handshake_auth = True 1428 will_verify = context.verify_mode != ssl.CERT_NONE 1429 if check_hostname is None: 1430 check_hostname = context.check_hostname 1431 if check_hostname and not will_verify: 1432 raise ValueError("check_hostname needs a SSL context with " 1433 "either CERT_OPTIONAL or CERT_REQUIRED") 1434 if key_file or cert_file: 1435 context.load_cert_chain(cert_file, key_file) 1436 # cert and key file means the user wants to authenticate. 1437 # enable TLS 1.3 PHA implicitly even for custom contexts. 1438 if context.post_handshake_auth is not None: 1439 context.post_handshake_auth = True 1440 self._context = context 1441 if check_hostname is not None: 1442 self._context.check_hostname = check_hostname 1443 1444 def connect(self): 1445 "Connect to a host on a given (SSL) port." 
1446 1447 super().connect() 1448 1449 if self._tunnel_host: 1450 server_hostname = self._tunnel_host 1451 else: 1452 server_hostname = self.host 1453 1454 self.sock = self._context.wrap_socket(self.sock, 1455 server_hostname=server_hostname) 1456 1457 __all__.append("HTTPSConnection") 1458 1459class HTTPException(Exception): 1460 # Subclasses that define an __init__ must call Exception.__init__ 1461 # or define self.args. Otherwise, str() will fail. 1462 pass 1463 1464class NotConnected(HTTPException): 1465 pass 1466 1467class InvalidURL(HTTPException): 1468 pass 1469 1470class UnknownProtocol(HTTPException): 1471 def __init__(self, version): 1472 self.args = version, 1473 self.version = version 1474 1475class UnknownTransferEncoding(HTTPException): 1476 pass 1477 1478class UnimplementedFileMode(HTTPException): 1479 pass 1480 1481class IncompleteRead(HTTPException): 1482 def __init__(self, partial, expected=None): 1483 self.args = partial, 1484 self.partial = partial 1485 self.expected = expected 1486 def __repr__(self): 1487 if self.expected is not None: 1488 e = ', %i more expected' % self.expected 1489 else: 1490 e = '' 1491 return '%s(%i bytes read%s)' % (self.__class__.__name__, 1492 len(self.partial), e) 1493 __str__ = object.__str__ 1494 1495class ImproperConnectionState(HTTPException): 1496 pass 1497 1498class CannotSendRequest(ImproperConnectionState): 1499 pass 1500 1501class CannotSendHeader(ImproperConnectionState): 1502 pass 1503 1504class ResponseNotReady(ImproperConnectionState): 1505 pass 1506 1507class BadStatusLine(HTTPException): 1508 def __init__(self, line): 1509 if not line: 1510 line = repr(line) 1511 self.args = line, 1512 self.line = line 1513 1514class LineTooLong(HTTPException): 1515 def __init__(self, line_type): 1516 HTTPException.__init__(self, "got more than %d bytes when reading %s" 1517 % (_MAXLINE, line_type)) 1518 1519class RemoteDisconnected(ConnectionResetError, BadStatusLine): 1520 def __init__(self, *pos, **kw): 1521 
BadStatusLine.__init__(self, "") 1522 ConnectionResetError.__init__(self, *pos, **kw) 1523 1524# for backwards compatibility 1525error = HTTPException 1526