1 r"""HTTP/1.1 client library
2
3 <intro stuff goes here>
4 <other stuff, too>
5
6 HTTPConnection goes through a number of "states", which define when a client
7 may legally make another request or fetch the response for a particular
8 request. This diagram details these state transitions:
9
10 (null)
11 |
12 | HTTPConnection()
13 v
14 Idle
15 |
16 | putrequest()
17 v
18 Request-started
19 |
20 | ( putheader() )* endheaders()
21 v
22 Request-sent
23 |\_____________________________
24 | | getresponse() raises
25 | response = getresponse() | ConnectionError
26 v v
27 Unread-response Idle
28 [Response-headers-read]
29 |\____________________
30 | |
31 | response.read() | putrequest()
32 v v
33 Idle Req-started-unread-response
34 ______/|
35 / |
36 response.read() | | ( putheader() )* endheaders()
37 v v
38 Request-started Req-sent-unread-response
39 |
40 | response.read()
41 v
42 Request-sent
43
44 This diagram presents the following rules:
45 -- a second request may not be started until {response-headers-read}
46 -- a response [object] cannot be retrieved until {request-sent}
47 -- there is no differentiation between an unread response body and a
48 partially read response body
49
50 Note: this enforcement is applied by the HTTPConnection class. The
51 HTTPResponse class does not enforce this state machine, which
52 implies sophisticated clients may accelerate the request/response
53 pipeline. Caution should be taken, though: accelerating the states
54 beyond the above pattern may imply knowledge of the server's
55 connection-close behavior for certain requests. For example, it
56 is impossible to tell whether the server will close the connection
57 UNTIL the response headers have been read; this means that further
58 requests cannot be placed into the pipeline until it is known that
59 the server will NOT be closing the connection.
60
61 Logical State __state __response
62 ------------- ------- ----------
63 Idle _CS_IDLE None
64 Request-started _CS_REQ_STARTED None
65 Request-sent _CS_REQ_SENT None
66 Unread-response _CS_IDLE <response_class>
67 Req-started-unread-response _CS_REQ_STARTED <response_class>
68 Req-sent-unread-response _CS_REQ_SENT <response_class>
69 """
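# Illustrative sketch (not part of the library): the state transitions above
# correspond to the following low-level usage, assuming a reachable
# placeholder host "www.example.com":
#
#     conn = HTTPConnection("www.example.com")
#     conn.putrequest("GET", "/")            # Idle -> Request-started
#     conn.putheader("Accept", "text/html")
#     conn.endheaders()                      # Request-started -> Request-sent
#     resp = conn.getresponse()              # Request-sent -> Unread-response
#     body = resp.read()                     # Unread-response -> Idle
#     conn.close()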
70
71 import email.parser
72 import email.message
73 import errno
74 import http
75 import io
76 import re
77 import socket
78 import sys
79 import collections.abc
80 from urllib.parse import urlsplit
81
82 # HTTPMessage, parse_headers(), and the HTTP status code constants are
83 # intentionally omitted for simplicity
84 __all__ = ["HTTPResponse", "HTTPConnection",
85 "HTTPException", "NotConnected", "UnknownProtocol",
86 "UnknownTransferEncoding", "UnimplementedFileMode",
87 "IncompleteRead", "InvalidURL", "ImproperConnectionState",
88 "CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
89 "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error",
90 "responses"]
91
92 HTTP_PORT = 80
93 HTTPS_PORT = 443
94
95 _UNKNOWN = 'UNKNOWN'
96
97 # connection states
98 _CS_IDLE = 'Idle'
99 _CS_REQ_STARTED = 'Request-started'
100 _CS_REQ_SENT = 'Request-sent'
101
102
103 # hack to maintain backwards compatibility
104 globals().update(http.HTTPStatus.__members__)
105
106 # another hack to maintain backwards compatibility
107 # Mapping status codes to their official reason phrases
108 responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
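# For example, responses[http.HTTPStatus.NOT_FOUND] == 'Not Found'.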
109
110 # maximal line length when calling readline().
111 _MAXLINE = 65536
112 _MAXHEADERS = 100
113
114 # Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2)
115 #
116 # VCHAR = %x21-7E
117 # obs-text = %x80-FF
118 # header-field = field-name ":" OWS field-value OWS
119 # field-name = token
120 # field-value = *( field-content / obs-fold )
121 # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
122 # field-vchar = VCHAR / obs-text
123 #
124 # obs-fold = CRLF 1*( SP / HTAB )
125 # ; obsolete line folding
126 # ; see Section 3.2.4
127
128 # token = 1*tchar
129 #
130 # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
131 # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
132 # / DIGIT / ALPHA
133 # ; any VCHAR, except delimiters
134 #
135 # VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1
136
137 # the patterns for both name and value are more lenient than RFC
138 # definitions to allow for backwards compatibility
139 _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
140 _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
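# Illustrative behavior of these lenient checks (a sketch, not executed here):
#
#     _is_legal_header_name(b"X-Custom-Header")        # matches
#     _is_legal_header_name(b"Bad:Name")                # None (contains ':')
#     _is_illegal_header_value(b"a plain value")        # None (legal)
#     _is_illegal_header_value(b"evil\r\nInjected: x")  # matches (bare CR/LF)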
141
142 # These characters are not allowed within HTTP URL paths.
143 # See https://tools.ietf.org/html/rfc3986#section-3.3 and the
144 # https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
145 # Prevents CVE-2019-9740. Includes control characters such as \r\n.
146 # We don't restrict chars above \x7f as putrequest() limits us to ASCII.
147 _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
148 # Arguably only these _should_ be allowed:
149 # _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
150 # We are more lenient for assumed real world compatibility purposes.
151
152 # These characters are not allowed within HTTP method names
153 # to prevent http header injection.
154 _contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
155
156 # We always set the Content-Length header for these methods because some
157 # servers will otherwise respond with a 411
158 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
159
160
161 def _encode(data, name='data'):
162 """Call data.encode("latin-1") but show a better error message."""
163 try:
164 return data.encode("latin-1")
165 except UnicodeEncodeError as err:
166 raise UnicodeEncodeError(
167 err.encoding,
168 err.object,
169 err.start,
170 err.end,
171 "%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') "
172 "if you want to send it encoded in UTF-8." %
173 (name.title(), data[err.start:err.end], name)) from None
174
175 def _strip_ipv6_iface(enc_name: bytes) -> bytes:
176 """Remove interface scope from IPv6 address."""
177 enc_name, percent, _ = enc_name.partition(b"%")
178 if percent:
179 assert enc_name.startswith(b'['), enc_name
180 enc_name += b']'
181 return enc_name
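# Expected behavior (a sketch; the scope id "eth0" is only an example):
#
#     _strip_ipv6_iface(b'[fe80::1%eth0]')  ->  b'[fe80::1]'
#     _strip_ipv6_iface(b'[::1]')           ->  b'[::1]'   (unchanged)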
182
183 class HTTPMessage(email.message.Message):
184 # XXX The only usage of this method is in
185 # http.server.CGIHTTPRequestHandler. Maybe move the code there so
186 # that it doesn't need to be part of the public API. The API has
187 # never been defined so this could cause backwards compatibility
188 # issues.
189
190 def getallmatchingheaders(self, name):
191 """Find all header lines matching a given header name.
192
193 Look through the list of headers and find all lines matching a given
194 header name (and their continuation lines). A list of the lines is
195 returned, without interpretation. If the header does not occur, an
196 empty list is returned. If the header occurs multiple times, all
197 occurrences are returned. Case is not important in the header name.
198
199 """
200 name = name.lower() + ':'
201 n = len(name)
202 lst = []
203 hit = 0
204 for line in self.keys():
205 if line[:n].lower() == name:
206 hit = 1
207 elif not line[:1].isspace():
208 hit = 0
209 if hit:
210 lst.append(line)
211 return lst
212
213 def _read_headers(fp):
214 """Reads potential header lines into a list from a file pointer.
215
216 Length of line is limited by _MAXLINE, and number of
217 headers is limited by _MAXHEADERS.
218 """
219 headers = []
220 while True:
221 line = fp.readline(_MAXLINE + 1)
222 if len(line) > _MAXLINE:
223 raise LineTooLong("header line")
224 headers.append(line)
225 if len(headers) > _MAXHEADERS:
226 raise HTTPException("got more than %d headers" % _MAXHEADERS)
227 if line in (b'\r\n', b'\n', b''):
228 break
229 return headers
230
231 def parse_headers(fp, _class=HTTPMessage):
232 """Parses only RFC2822 headers from a file pointer.
233
234 email Parser wants to see strings rather than bytes.
235 But a TextIOWrapper around self.rfile would buffer too many bytes
236 from the stream, bytes which we later need to read as bytes.
237 So we read the correct bytes here, as bytes, for email Parser
238 to parse.
239
240 """
241 headers = _read_headers(fp)
242 hstring = b''.join(headers).decode('iso-8859-1')
243 return email.parser.Parser(_class=_class).parsestr(hstring)
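# Minimal usage sketch for parse_headers(); the raw header block below is a
# made-up example and must end with a blank line, as on the wire:
#
#     import io
#     raw = b"Host: example.org\r\nContent-Length: 0\r\n\r\n"
#     msg = parse_headers(io.BytesIO(raw))
#     msg["Host"]           # 'example.org'
#     msg.get_all("Host")   # ['example.org']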
244
245
246 class HTTPResponse(io.BufferedIOBase):
247
248 # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
249
250 # The bytes from the socket object are decoded as iso-8859-1 strings.
251 # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded
252 # text following RFC 2047. The basic status line parsing only
253 # accepts iso-8859-1.
254
255 def __init__(self, sock, debuglevel=0, method=None, url=None):
256 # If the response includes a content-length header, we need to
257 # make sure that the client doesn't read more than the
258 # specified number of bytes. If it does, it will block until
259 # the server times out and closes the connection. This will
260 # happen if a self.fp.read() is done (without a size) whether
261 # self.fp is buffered or not. So, no self.fp.read() by
262 # clients unless they know what they are doing.
263 self.fp = sock.makefile("rb")
264 self.debuglevel = debuglevel
265 self._method = method
266
267 # The HTTPResponse object is returned via urllib. The clients
268 # of http and urllib expect different attributes for the
269 # headers. headers is used here and supports urllib. msg is
270 # provided as a backwards compatibility layer for http
271 # clients.
272
273 self.headers = self.msg = None
274
275 # from the Status-Line of the response
276 self.version = _UNKNOWN # HTTP-Version
277 self.status = _UNKNOWN # Status-Code
278 self.reason = _UNKNOWN # Reason-Phrase
279
280 self.chunked = _UNKNOWN # is "chunked" being used?
281 self.chunk_left = _UNKNOWN # bytes left to read in current chunk
282 self.length = _UNKNOWN # number of bytes left in response
283 self.will_close = _UNKNOWN # conn will close at end of response
284
285 def _read_status(self):
286 line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
287 if len(line) > _MAXLINE:
288 raise LineTooLong("status line")
289 if self.debuglevel > 0:
290 print("reply:", repr(line))
291 if not line:
292 # Presumably, the server closed the connection before
293 # sending a valid response.
294 raise RemoteDisconnected("Remote end closed connection without"
295 " response")
296 try:
297 version, status, reason = line.split(None, 2)
298 except ValueError:
299 try:
300 version, status = line.split(None, 1)
301 reason = ""
302 except ValueError:
303 # empty version will cause next test to fail.
304 version = ""
305 if not version.startswith("HTTP/"):
306 self._close_conn()
307 raise BadStatusLine(line)
308
309 # The status code is a three-digit number
310 try:
311 status = int(status)
312 if status < 100 or status > 999:
313 raise BadStatusLine(line)
314 except ValueError:
315 raise BadStatusLine(line)
316 return version, status, reason
317
318 def begin(self):
319 if self.headers is not None:
320 # we've already started reading the response
321 return
322
323 # read until we get a non-100 response
324 while True:
325 version, status, reason = self._read_status()
326 if status != CONTINUE:
327 break
328 # skip the header from the 100 response
329 skipped_headers = _read_headers(self.fp)
330 if self.debuglevel > 0:
331 print("headers:", skipped_headers)
332 del skipped_headers
333
334 self.code = self.status = status
335 self.reason = reason.strip()
336 if version in ("HTTP/1.0", "HTTP/0.9"):
337 # Some servers might still return "0.9", treat it as 1.0 anyway
338 self.version = 10
339 elif version.startswith("HTTP/1."):
340 self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
341 else:
342 raise UnknownProtocol(version)
343
344 self.headers = self.msg = parse_headers(self.fp)
345
346 if self.debuglevel > 0:
347 for hdr, val in self.headers.items():
348 print("header:", hdr + ":", val)
349
350 # are we using the chunked-style of transfer encoding?
351 tr_enc = self.headers.get("transfer-encoding")
352 if tr_enc and tr_enc.lower() == "chunked":
353 self.chunked = True
354 self.chunk_left = None
355 else:
356 self.chunked = False
357
358 # will the connection close at the end of the response?
359 self.will_close = self._check_close()
360
361 # do we have a Content-Length?
362 # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
363 self.length = None
364 length = self.headers.get("content-length")
365 if length and not self.chunked:
366 try:
367 self.length = int(length)
368 except ValueError:
369 self.length = None
370 else:
371 if self.length < 0: # ignore nonsensical negative lengths
372 self.length = None
373 else:
374 self.length = None
375
376 # does the body have a fixed length? (of zero)
377 if (status == NO_CONTENT or status == NOT_MODIFIED or
378 100 <= status < 200 or # 1xx codes
379 self._method == "HEAD"):
380 self.length = 0
381
382 # if the connection remains open, and we aren't using chunked, and
383 # a content-length was not provided, then assume that the connection
384 # WILL close.
385 if (not self.will_close and
386 not self.chunked and
387 self.length is None):
388 self.will_close = True
389
390 def _check_close(self):
391 conn = self.headers.get("connection")
392 if self.version == 11:
393 # An HTTP/1.1 connection is assumed to stay open unless
394 # explicitly closed.
395 if conn and "close" in conn.lower():
396 return True
397 return False
398
399 # Some HTTP/1.0 implementations have support for persistent
400 # connections, using rules different than HTTP/1.1.
401
402 # For older HTTP, Keep-Alive indicates persistent connection.
403 if self.headers.get("keep-alive"):
404 return False
405
406 # At least Akamai returns a "Connection: Keep-Alive" header,
407 # which was supposed to be sent by the client.
408 if conn and "keep-alive" in conn.lower():
409 return False
410
411 # Proxy-Connection is a netscape hack.
412 pconn = self.headers.get("proxy-connection")
413 if pconn and "keep-alive" in pconn.lower():
414 return False
415
416 # otherwise, assume it will close
417 return True
418
419 def _close_conn(self):
420 fp = self.fp
421 self.fp = None
422 fp.close()
423
424 def close(self):
425 try:
426 super().close() # set "closed" flag
427 finally:
428 if self.fp:
429 self._close_conn()
430
431 # These implementations are for the benefit of io.BufferedReader.
432
433 # XXX This class should probably be revised to act more like
434 # the "raw stream" that BufferedReader expects.
435
436 def flush(self):
437 super().flush()
438 if self.fp:
439 self.fp.flush()
440
441 def readable(self):
442 """Always returns True"""
443 return True
444
445 # End of "raw stream" methods
446
447 def isclosed(self):
448 """True if the connection is closed."""
449 # NOTE: it is possible that we will not ever call self.close(). This
450 # case occurs when will_close is TRUE, length is None, and we
451 # read up to the last byte, but NOT past it.
452 #
453 # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
454 # called, meaning self.isclosed() is meaningful.
455 return self.fp is None
456
457 def read(self, amt=None):
458 """Read and return the response body, or up to the next amt bytes."""
459 if self.fp is None:
460 return b""
461
462 if self._method == "HEAD":
463 self._close_conn()
464 return b""
465
466 if self.chunked:
467 return self._read_chunked(amt)
468
469 if amt is not None:
470 if self.length is not None and amt > self.length:
471 # clip the read to the "end of response"
472 amt = self.length
473 s = self.fp.read(amt)
474 if not s and amt:
475 # Ideally, we would raise IncompleteRead if the content-length
476 # wasn't satisfied, but it might break compatibility.
477 self._close_conn()
478 elif self.length is not None:
479 self.length -= len(s)
480 if not self.length:
481 self._close_conn()
482 return s
483 else:
484 # Amount is not given (unbounded read) so we must check self.length
485 if self.length is None:
486 s = self.fp.read()
487 else:
488 try:
489 s = self._safe_read(self.length)
490 except IncompleteRead:
491 self._close_conn()
492 raise
493 self.length = 0
494 self._close_conn() # we read everything
495 return s
496
497 def readinto(self, b):
498 """Read up to len(b) bytes into bytearray b and return the number
499 of bytes read.
500 """
501
502 if self.fp is None:
503 return 0
504
505 if self._method == "HEAD":
506 self._close_conn()
507 return 0
508
509 if self.chunked:
510 return self._readinto_chunked(b)
511
512 if self.length is not None:
513 if len(b) > self.length:
514 # clip the read to the "end of response"
515 b = memoryview(b)[0:self.length]
516
517 # we do not use _safe_read() here because this may be a .will_close
518 # connection, and the user is reading more bytes than will be provided
519 # (for example, reading in 1k chunks)
520 n = self.fp.readinto(b)
521 if not n and b:
522 # Ideally, we would raise IncompleteRead if the content-length
523 # wasn't satisfied, but it might break compatibility.
524 self._close_conn()
525 elif self.length is not None:
526 self.length -= n
527 if not self.length:
528 self._close_conn()
529 return n
530
531 def _read_next_chunk_size(self):
532 # Read the next chunk size from the file
533 line = self.fp.readline(_MAXLINE + 1)
534 if len(line) > _MAXLINE:
535 raise LineTooLong("chunk size")
536 i = line.find(b";")
537 if i >= 0:
538 line = line[:i] # strip chunk-extensions
539 try:
540 return int(line, 16)
541 except ValueError:
542 # close the connection as protocol synchronisation is
543 # probably lost
544 self._close_conn()
545 raise
546
547 def _read_and_discard_trailer(self):
548 # read and discard trailer up to the CRLF terminator
549 ### note: we shouldn't have any trailers!
550 while True:
551 line = self.fp.readline(_MAXLINE + 1)
552 if len(line) > _MAXLINE:
553 raise LineTooLong("trailer line")
554 if not line:
555 # a vanishingly small number of sites EOF without
556 # sending the trailer
557 break
558 if line in (b'\r\n', b'\n', b''):
559 break
560
561 def _get_chunk_left(self):
562 # return self.chunk_left, reading a new chunk if necessary.
563 # chunk_left == 0: at the end of the current chunk, need to close it
564 # chunk_left == None: No current chunk, should read next.
565 # This function returns a non-zero value if data remains in the
566 # current chunk, or None once the last chunk has been read.
567 chunk_left = self.chunk_left
568 if not chunk_left: # Can be 0 or None
569 if chunk_left is not None:
570 # We are at the end of chunk, discard chunk end
571 self._safe_read(2) # toss the CRLF at the end of the chunk
572 try:
573 chunk_left = self._read_next_chunk_size()
574 except ValueError:
575 raise IncompleteRead(b'')
576 if chunk_left == 0:
577 # last chunk: 1*("0") [ chunk-extension ] CRLF
578 self._read_and_discard_trailer()
579 # we read everything; close the "file"
580 self._close_conn()
581 chunk_left = None
582 self.chunk_left = chunk_left
583 return chunk_left
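# For reference, the chunked framing consumed by the helpers above looks like
# this on the wire (chunk sizes are hexadecimal, each part ends with CRLF):
#
#     4\r\n
#     Wiki\r\n
#     5\r\n
#     pedia\r\n
#     0\r\n
#     \r\n
#
# _read_next_chunk_size() parses the size lines, _get_chunk_left() tracks how
# much of the current chunk is still unread, and the final "0" chunk triggers
# _read_and_discard_trailer() followed by _close_conn().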
584
585 def _read_chunked(self, amt=None):
586 assert self.chunked != _UNKNOWN
587 value = []
588 try:
589 while True:
590 chunk_left = self._get_chunk_left()
591 if chunk_left is None:
592 break
593
594 if amt is not None and amt <= chunk_left:
595 value.append(self._safe_read(amt))
596 self.chunk_left = chunk_left - amt
597 break
598
599 value.append(self._safe_read(chunk_left))
600 if amt is not None:
601 amt -= chunk_left
602 self.chunk_left = 0
603 return b''.join(value)
604 except IncompleteRead as exc:
605 raise IncompleteRead(b''.join(value)) from exc
606
607 def _readinto_chunked(self, b):
608 assert self.chunked != _UNKNOWN
609 total_bytes = 0
610 mvb = memoryview(b)
611 try:
612 while True:
613 chunk_left = self._get_chunk_left()
614 if chunk_left is None:
615 return total_bytes
616
617 if len(mvb) <= chunk_left:
618 n = self._safe_readinto(mvb)
619 self.chunk_left = chunk_left - n
620 return total_bytes + n
621
622 temp_mvb = mvb[:chunk_left]
623 n = self._safe_readinto(temp_mvb)
624 mvb = mvb[n:]
625 total_bytes += n
626 self.chunk_left = 0
627
628 except IncompleteRead:
629 raise IncompleteRead(bytes(b[0:total_bytes]))
630
631 def _safe_read(self, amt):
632 """Read the number of bytes requested.
633
634 This function should be used when <amt> bytes "should" be present for
635 reading. If the bytes are truly not available (due to EOF), then the
636 IncompleteRead exception can be used to detect the problem.
637 """
638 data = self.fp.read(amt)
639 if len(data) < amt:
640 raise IncompleteRead(data, amt-len(data))
641 return data
642
643 def _safe_readinto(self, b):
644 """Same as _safe_read, but for reading into a buffer."""
645 amt = len(b)
646 n = self.fp.readinto(b)
647 if n < amt:
648 raise IncompleteRead(bytes(b[:n]), amt-n)
649 return n
650
651 def read1(self, n=-1):
652 """Read with at most one underlying system call. If at least one
653 byte is buffered, return that instead.
654 """
655 if self.fp is None or self._method == "HEAD":
656 return b""
657 if self.chunked:
658 return self._read1_chunked(n)
659 if self.length is not None and (n < 0 or n > self.length):
660 n = self.length
661 result = self.fp.read1(n)
662 if not result and n:
663 self._close_conn()
664 elif self.length is not None:
665 self.length -= len(result)
666 return result
667
668 def peek(self, n=-1):
669 # Having this enables IOBase.readline() to read more than one
670 # byte at a time
671 if self.fp is None or self._method == "HEAD":
672 return b""
673 if self.chunked:
674 return self._peek_chunked(n)
675 return self.fp.peek(n)
676
677 def readline(self, limit=-1):
678 if self.fp is None or self._method == "HEAD":
679 return b""
680 if self.chunked:
681 # Fallback to IOBase readline which uses peek() and read()
682 return super().readline(limit)
683 if self.length is not None and (limit < 0 or limit > self.length):
684 limit = self.length
685 result = self.fp.readline(limit)
686 if not result and limit:
687 self._close_conn()
688 elif self.length is not None:
689 self.length -= len(result)
690 return result
691
692 def _read1_chunked(self, n):
693 # Strictly speaking, _get_chunk_left() may cause more than one read,
694 # but that is ok, since that is to satisfy the chunked protocol.
695 chunk_left = self._get_chunk_left()
696 if chunk_left is None or n == 0:
697 return b''
698 if not (0 <= n <= chunk_left):
699 n = chunk_left # if n is negative or larger than chunk_left
700 read = self.fp.read1(n)
701 self.chunk_left -= len(read)
702 if not read:
703 raise IncompleteRead(b"")
704 return read
705
706 def _peek_chunked(self, n):
707 # Strictly speaking, _get_chunk_left() may cause more than one read,
708 # but that is ok, since that is to satisfy the chunked protocol.
709 try:
710 chunk_left = self._get_chunk_left()
711 except IncompleteRead:
712 return b'' # peek doesn't worry about protocol
713 if chunk_left is None:
714 return b'' # eof
715 # peek is allowed to return more than requested. Just request the
716 # entire chunk, and truncate what we get.
717 return self.fp.peek(chunk_left)[:chunk_left]
718
719 def fileno(self):
720 return self.fp.fileno()
721
722 def getheader(self, name, default=None):
723 '''Returns the value of the header matching *name*.
724
725 If there are multiple matching headers, the values are
726 combined into a single string separated by commas and spaces.
727
728 If no matching header is found, returns *default* or None if
729 the *default* is not specified.
730
731 If the headers are unknown, raises http.client.ResponseNotReady.
732
733 '''
734 if self.headers is None:
735 raise ResponseNotReady()
736 headers = self.headers.get_all(name) or default
737 if isinstance(headers, str) or not hasattr(headers, '__iter__'):
738 return headers
739 else:
740 return ', '.join(headers)
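# Behavior sketch (resp is a hypothetical HTTPResponse whose headers have
# already been read):
#
#     resp.getheader('Set-Cookie')           # 'a=1, b=2' if sent twice
#     resp.getheader('X-Missing', 'none')    # 'none' (default returned as-is)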
741
742 def getheaders(self):
743 """Return list of (header, value) tuples."""
744 if self.headers is None:
745 raise ResponseNotReady()
746 return list(self.headers.items())
747
748 # We override IOBase.__iter__ so that it doesn't check for closed-ness
749
750 def __iter__(self):
751 return self
752
753 # For compatibility with old-style urllib responses.
754
755 def info(self):
756 '''Returns an instance of the class email.message.Message containing
757 meta-information associated with the URL.
758
759 When the method is HTTP, these headers are those returned by
760 the server at the head of the retrieved HTML page (including
761 Content-Length and Content-Type).
762
763 When the method is FTP, a Content-Length header will be
764 present if (as is now usual) the server passed back a file
765 length in response to the FTP retrieval request. A
766 Content-Type header will be present if the MIME type can be
767 guessed.
768
769 When the method is local-file, returned headers will include
770 a Date representing the file's last-modified time, a
771 Content-Length giving file size, and a Content-Type
772 containing a guess at the file's type. See also the
773 description of the email.message module.
774
775 '''
776 return self.headers
777
778 def geturl(self):
779 '''Return the real URL of the page.
780
781 In some cases, the HTTP server redirects a client to another
782 URL. The urlopen() function handles this transparently, but in
783 some cases the caller needs to know which URL the client was
784 redirected to. The geturl() method can be used to get at this
785 redirected URL.
786
787 '''
788 return self.url
789
790 def getcode(self):
791 '''Return the HTTP status code that was sent with the response,
792 or None if the URL is not an HTTP URL.
793
794 '''
795 return self.status
796
797 class HTTPConnection:
798
799 _http_vsn = 11
800 _http_vsn_str = 'HTTP/1.1'
801
802 response_class = HTTPResponse
803 default_port = HTTP_PORT
804 auto_open = 1
805 debuglevel = 0
806
807 @staticmethod
808 def _is_textIO(stream):
809 """Test whether a file-like object is a text or a binary stream.
810 """
811 return isinstance(stream, io.TextIOBase)
812
813 @staticmethod
814 def _get_content_length(body, method):
815 """Get the content-length based on the body.
816
817 If the body is None, we set Content-Length: 0 for methods that expect
818 a body (RFC 7230, Section 3.3.2). We also set the Content-Length for
819 any method if the body is a str or bytes-like object and not a file.
820 """
821 if body is None:
822 # do an explicit check for not None here to distinguish
823 # between unset and set but empty
824 if method.upper() in _METHODS_EXPECTING_BODY:
825 return 0
826 else:
827 return None
828
829 if hasattr(body, 'read'):
830 # file-like object.
831 return None
832
833 try:
834 # does it implement the buffer protocol (bytes, bytearray, array)?
835 mv = memoryview(body)
836 return mv.nbytes
837 except TypeError:
838 pass
839
840 if isinstance(body, str):
841 return len(body)
842
843 return None
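# Behavior sketch, (body, method) -> returned content length:
#
#     (None, 'POST')           ->  0      POST/PUT/PATCH expect a body
#     (None, 'GET')            ->  None   no Content-Length header is sent
#     (b'abc', 'PUT')          ->  3      bytes-like: exact byte count
#     ('abc', 'PUT')           ->  3      str: character count (pre-encoding)
#     (open('f', 'rb'), 'PUT') ->  None   file-like object: size unknown here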
844
845 def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
846 source_address=None, blocksize=8192):
847 self.timeout = timeout
848 self.source_address = source_address
849 self.blocksize = blocksize
850 self.sock = None
851 self._buffer = []
852 self.__response = None
853 self.__state = _CS_IDLE
854 self._method = None
855 self._tunnel_host = None
856 self._tunnel_port = None
857 self._tunnel_headers = {}
858
859 (self.host, self.port) = self._get_hostport(host, port)
860
861 self._validate_host(self.host)
862
863 # This is stored as an instance variable to allow unit
864 # tests to replace it with a suitable mockup
865 self._create_connection = socket.create_connection
866
867 def set_tunnel(self, host, port=None, headers=None):
868 """Set up host and port for HTTP CONNECT tunnelling.
869
870 In a connection that uses HTTP CONNECT tunneling, the host passed to the
871 constructor is used as a proxy server that relays all communication to
872 the endpoint passed to `set_tunnel`. This is done by sending an HTTP
873 CONNECT request to the proxy server when the connection is established.
874
875 This method must be called before the HTTP connection has been
876 established.
877
878 The headers argument should be a mapping of extra HTTP headers to send
879 with the CONNECT request.
880 """
881
882 if self.sock:
883 raise RuntimeError("Can't set up tunnel for established connection")
884
885 self._tunnel_host, self._tunnel_port = self._get_hostport(host, port)
886 if headers:
887 self._tunnel_headers = headers
888 else:
889 self._tunnel_headers.clear()
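# Typical tunnelling usage (illustrative; "proxy.example.com" and
# "www.example.com" are placeholder hosts):
#
#     conn = HTTPConnection("proxy.example.com", 8080)
#     conn.set_tunnel("www.example.com", 80)
#     conn.request("GET", "/")      # the CONNECT request is sent first
#     resp = conn.getresponse()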
890
891 def _get_hostport(self, host, port):
892 if port is None:
893 i = host.rfind(':')
894 j = host.rfind(']') # ipv6 addresses have [...]
895 if i > j:
896 try:
897 port = int(host[i+1:])
898 except ValueError:
899 if host[i+1:] == "": # http://foo.com:/ == http://foo.com/
900 port = self.default_port
901 else:
902 raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
903 host = host[:i]
904 else:
905 port = self.default_port
906 if host and host[0] == '[' and host[-1] == ']':
907 host = host[1:-1]
908
909 return (host, port)
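# Behavior sketch for the host/port splitting above (default_port is 80 for a
# plain HTTPConnection):
#
#     'www.python.org'       ->  ('www.python.org', 80)
#     'www.python.org:8080'  ->  ('www.python.org', 8080)
#     '[::1]:8080'           ->  ('::1', 8080)
#     '[fe80::1]'            ->  ('fe80::1', 80)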
910
911 def set_debuglevel(self, level):
912 self.debuglevel = level
913
914 def _tunnel(self):
915 connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (
916 self._tunnel_host.encode("ascii"), self._tunnel_port)
917 headers = [connect]
918 for header, value in self._tunnel_headers.items():
919 headers.append(f"{header}: {value}\r\n".encode("latin-1"))
920 headers.append(b"\r\n")
921 # Making a single send() call instead of one per line encourages
922 # the host OS to use a more optimal packet size instead of
923 # potentially emitting a series of small packets.
924 self.send(b"".join(headers))
925 del headers
926
927 response = self.response_class(self.sock, method=self._method)
928 try:
929 (version, code, message) = response._read_status()
930
931 if code != http.HTTPStatus.OK:
932 self.close()
933 raise OSError(f"Tunnel connection failed: {code} {message.strip()}")
934 while True:
935 line = response.fp.readline(_MAXLINE + 1)
936 if len(line) > _MAXLINE:
937 raise LineTooLong("header line")
938 if not line:
939 # for sites which EOF without sending a trailer
940 break
941 if line in (b'\r\n', b'\n', b''):
942 break
943
944 if self.debuglevel > 0:
945 print('header:', line.decode())
946 finally:
947 response.close()
948
949 def connect(self):
950 """Connect to the host and port specified in __init__."""
951 sys.audit("http.client.connect", self, self.host, self.port)
952 self.sock = self._create_connection(
953 (self.host,self.port), self.timeout, self.source_address)
954 # Might fail in OSs that don't implement TCP_NODELAY
955 try:
956 self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
957 except OSError as e:
958 if e.errno != errno.ENOPROTOOPT:
959 raise
960
961 if self._tunnel_host:
962 self._tunnel()
963
964 def close(self):
965 """Close the connection to the HTTP server."""
966 self.__state = _CS_IDLE
967 try:
968 sock = self.sock
969 if sock:
970 self.sock = None
971 sock.close() # close it manually... there may be other refs
972 finally:
973 response = self.__response
974 if response:
975 self.__response = None
976 response.close()
977
978 def send(self, data):
979 """Send `data' to the server.
980 `data' can be a string object, a bytes object, an array object, a
981 file-like object that supports a .read() method, or an iterable object.
982 """
983
984 if self.sock is None:
985 if self.auto_open:
986 self.connect()
987 else:
988 raise NotConnected()
989
990 if self.debuglevel > 0:
991 print("send:", repr(data))
992 if hasattr(data, "read") :
993 if self.debuglevel > 0:
994 print("sending a readable")
995 encode = self._is_textIO(data)
996 if encode and self.debuglevel > 0:
997 print("encoding file using iso-8859-1")
998 while 1:
999 datablock = data.read(self.blocksize)
1000 if not datablock:
1001 break
1002 if encode:
1003 datablock = datablock.encode("iso-8859-1")
1004 sys.audit("http.client.send", self, datablock)
1005 self.sock.sendall(datablock)
1006 return
1007 sys.audit("http.client.send", self, data)
1008 try:
1009 self.sock.sendall(data)
1010 except TypeError:
1011 if isinstance(data, collections.abc.Iterable):
1012 for d in data:
1013 self.sock.sendall(d)
1014 else:
1015 raise TypeError("data should be a bytes-like object "
1016 "or an iterable, got %r" % type(data))
1017
1018 def _output(self, s):
1019 """Add a line of output to the current request buffer.
1020
1021 Assumes that the line does *not* end with \\r\\n.
1022 """
1023 self._buffer.append(s)
1024
1025 def _read_readable(self, readable):
1026 if self.debuglevel > 0:
1027 print("reading a readable")
1028 encode = self._is_textIO(readable)
1029 if encode and self.debuglevel > 0:
1030 print("encoding file using iso-8859-1")
1031 while True:
1032 datablock = readable.read(self.blocksize)
1033 if not datablock:
1034 break
1035 if encode:
1036 datablock = datablock.encode("iso-8859-1")
1037 yield datablock
1038
1039 def _send_output(self, message_body=None, encode_chunked=False):
1040 """Send the currently buffered request and clear the buffer.
1041
1042 Appends an extra \\r\\n to the buffer.
1043 A message_body may be specified, to be appended to the request.
1044 """
1045 self._buffer.extend((b"", b""))
1046 msg = b"\r\n".join(self._buffer)
1047 del self._buffer[:]
1048 self.send(msg)
1049
1050 if message_body is not None:
1051
1052 # create a consistent interface to message_body
1053 if hasattr(message_body, 'read'):
1054 # Let file-like take precedence over byte-like. This
1055 # is needed to allow the current position of mmap'ed
1056 # files to be taken into account.
1057 chunks = self._read_readable(message_body)
1058 else:
1059 try:
1060 # this is solely to check to see if message_body
1061 # implements the buffer API. it /would/ be easier
1062 # to capture if PyObject_CheckBuffer was exposed
1063 # to Python.
1064 memoryview(message_body)
1065 except TypeError:
1066 try:
1067 chunks = iter(message_body)
1068 except TypeError:
1069 raise TypeError("message_body should be a bytes-like "
1070 "object or an iterable, got %r"
1071 % type(message_body))
1072 else:
1073 # the object implements the buffer interface and
1074 # can be passed directly into socket methods
1075 chunks = (message_body,)
1076
1077 for chunk in chunks:
1078 if not chunk:
1079 if self.debuglevel > 0:
1080 print('Zero length chunk ignored')
1081 continue
1082
1083 if encode_chunked and self._http_vsn == 11:
1084 # chunked encoding
1085 chunk = f'{len(chunk):X}\r\n'.encode('ascii') + chunk \
1086 + b'\r\n'
1087 self.send(chunk)
1088
1089 if encode_chunked and self._http_vsn == 11:
1090 # end chunked transfer
1091 self.send(b'0\r\n\r\n')
1092
1093 def putrequest(self, method, url, skip_host=False,
1094 skip_accept_encoding=False):
1095 """Send a request to the server.
1096
1097 `method' specifies an HTTP request method, e.g. 'GET'.
1098 `url' specifies the object being requested, e.g. '/index.html'.
1099 `skip_host' if True does not automatically add a 'Host:' header
1100 `skip_accept_encoding' if True does not automatically add an
1101 'Accept-Encoding:' header
1102 """
1103
1104 # if a prior response has been completed, then forget about it.
1105 if self.__response and self.__response.isclosed():
1106 self.__response = None
1107
1108
1109 # in certain cases, we cannot issue another request on this connection.
1110 # this occurs when:
1111 # 1) we are in the process of sending a request. (_CS_REQ_STARTED)
1112 # 2) a response to a previous request has signalled that it is going
1113 # to close the connection upon completion.
1114 # 3) the headers for the previous response have not been read, thus
1115 # we cannot determine whether point (2) is true. (_CS_REQ_SENT)
1116 #
1117 # if there is no prior response, then we can request at will.
1118 #
1119 # if point (2) is true, then we will have passed the socket to the
1120 # response (effectively meaning, "there is no prior response"), and
1121 # will open a new one when a new request is made.
1122 #
1123 # Note: if a prior response exists, then we *can* start a new request.
1124 # We are not allowed to begin fetching the response to this new
1125 # request, however, until that prior response is complete.
1126 #
1127 if self.__state == _CS_IDLE:
1128 self.__state = _CS_REQ_STARTED
1129 else:
1130 raise CannotSendRequest(self.__state)
1131
1132 self._validate_method(method)
1133
1134 # Save the method for use later in the response phase
1135 self._method = method
1136
1137 url = url or '/'
1138 self._validate_path(url)
1139
1140 request = '%s %s %s' % (method, url, self._http_vsn_str)
1141
1142 self._output(self._encode_request(request))
1143
1144 if self._http_vsn == 11:
1145 # Issue some standard headers for better HTTP/1.1 compliance
1146
1147 if not skip_host:
1148 # this header is issued *only* for HTTP/1.1
1149 # connections. more specifically, this means it is
1150 # only issued when the client uses the new
1151 # HTTPConnection() class. backwards-compat clients
1152 # will be using HTTP/1.0 and those clients may be
1153 # issuing this header themselves. we should NOT issue
1154 # it twice; some web servers (such as Apache) barf
1155 # when they see two Host: headers
1156
1157 # If we need a non-standard port, include it in the
1158 # header. If the request is going through a proxy,
1159 # use the host of the actual URL, not the host of the
1160 # proxy.
1161
1162 netloc = ''
1163 if url.startswith('http'):
1164 nil, netloc, nil, nil, nil = urlsplit(url)
1165
1166 if netloc:
1167 try:
1168 netloc_enc = netloc.encode("ascii")
1169 except UnicodeEncodeError:
1170 netloc_enc = netloc.encode("idna")
1171 self.putheader('Host', _strip_ipv6_iface(netloc_enc))
1172 else:
1173 if self._tunnel_host:
1174 host = self._tunnel_host
1175 port = self._tunnel_port
1176 else:
1177 host = self.host
1178 port = self.port
1179
1180 try:
1181 host_enc = host.encode("ascii")
1182 except UnicodeEncodeError:
1183 host_enc = host.encode("idna")
1184
1185 # As per RFC 2732, an IPv6 address should be wrapped in []
1186 # when used as a Host header
1187
1188 if ":" in host:
1189 host_enc = b'[' + host_enc + b']'
1190 host_enc = _strip_ipv6_iface(host_enc)
1191
1192 if port == self.default_port:
1193 self.putheader('Host', host_enc)
1194 else:
1195 host_enc = host_enc.decode("ascii")
1196 self.putheader('Host', "%s:%s" % (host_enc, port))
1197
1198 # note: we are assuming that clients will not attempt to set these
1199 # headers since *this* library must deal with the
1200 # consequences. this also means that when the supporting
1201 # libraries are updated to recognize other forms, then this
1202 # code should be changed (removed or updated).
1203
1204 # we only want a Content-Encoding of "identity" since we don't
1205 # support encodings such as x-gzip or x-deflate.
1206 if not skip_accept_encoding:
1207 self.putheader('Accept-Encoding', 'identity')
1208
1209 # we can accept "chunked" Transfer-Encodings, but no others
1210 # NOTE: no TE header implies *only* "chunked"
1211 #self.putheader('TE', 'chunked')
1212
1213 # if TE is supplied in the header, then it must appear in a
1214 # Connection header.
1215 #self.putheader('Connection', 'TE')
1216
1217 else:
1218 # For HTTP/1.0, the server will assume "not chunked"
1219 pass
1220
1221 def _encode_request(self, request):
1222 # ASCII also helps prevent CVE-2019-9740.
1223 return request.encode('ascii')
1224
1225 def _validate_method(self, method):
1226 """Validate a method name for putrequest."""
1227 # prevent http header injection
1228 match = _contains_disallowed_method_pchar_re.search(method)
1229 if match:
1230 raise ValueError(
1231 f"method can't contain control characters. {method!r} "
1232 f"(found at least {match.group()!r})")
1233
1234 def _validate_path(self, url):
1235 """Validate a url for putrequest."""
1236 # Prevent CVE-2019-9740.
1237 match = _contains_disallowed_url_pchar_re.search(url)
1238 if match:
1239 raise InvalidURL(f"URL can't contain control characters. {url!r} "
1240 f"(found at least {match.group()!r})")
1241
1242 def _validate_host(self, host):
1243 """Validate a host so it doesn't contain control characters."""
1244 # Prevent CVE-2019-18348.
1245 match = _contains_disallowed_url_pchar_re.search(host)
1246 if match:
1247 raise InvalidURL(f"URL can't contain control characters. {host!r} "
1248 f"(found at least {match.group()!r})")
1249
1250 def putheader(self, header, *values):
1251 """Send a request header line to the server.
1252
1253 For example: h.putheader('Accept', 'text/html')
1254 """
1255 if self.__state != _CS_REQ_STARTED:
1256 raise CannotSendHeader()
1257
1258 if hasattr(header, 'encode'):
1259 header = header.encode('ascii')
1260
1261 if not _is_legal_header_name(header):
1262 raise ValueError('Invalid header name %r' % (header,))
1263
1264 values = list(values)
1265 for i, one_value in enumerate(values):
1266 if hasattr(one_value, 'encode'):
1267 values[i] = one_value.encode('latin-1')
1268 elif isinstance(one_value, int):
1269 values[i] = str(one_value).encode('ascii')
1270
1271 if _is_illegal_header_value(values[i]):
1272 raise ValueError('Invalid header value %r' % (values[i],))
1273
1274 value = b'\r\n\t'.join(values)
1275 header = header + b': ' + value
1276 self._output(header)
1277
1278 def endheaders(self, message_body=None, *, encode_chunked=False):
1279 """Indicate that the last header line has been sent to the server.
1280
1281 This method sends the request to the server. The optional message_body
1282 argument can be used to pass a message body associated with the
1283 request.
1284 """
1285 if self.__state == _CS_REQ_STARTED:
1286 self.__state = _CS_REQ_SENT
1287 else:
1288 raise CannotSendHeader()
1289 self._send_output(message_body, encode_chunked=encode_chunked)
1290
1291 def request(self, method, url, body=None, headers={}, *,
1292 encode_chunked=False):
1293 """Send a complete request to the server."""
1294 self._send_request(method, url, body, headers, encode_chunked)
1295
1296 def _send_request(self, method, url, body, headers, encode_chunked):
1297 # Honor explicitly requested Host: and Accept-Encoding: headers.
1298 header_names = frozenset(k.lower() for k in headers)
1299 skips = {}
1300 if 'host' in header_names:
1301 skips['skip_host'] = 1
1302 if 'accept-encoding' in header_names:
1303 skips['skip_accept_encoding'] = 1
1304
1305 self.putrequest(method, url, **skips)
1306
1307 # chunked encoding will happen if HTTP/1.1 is used and either
1308 # the caller passes encode_chunked=True or the following
1309 # conditions hold:
1310 # 1. content-length has not been explicitly set
1311 # 2. the body is a file or iterable, but not a str or bytes-like
1312 # 3. Transfer-Encoding has NOT been explicitly set by the caller
1313
1314 if 'content-length' not in header_names:
1315 # only chunk body if not explicitly set for backwards
1316 # compatibility, assuming the client code is already handling the
1317 # chunking
1318 if 'transfer-encoding' not in header_names:
1319 # if content-length cannot be automatically determined, fall
1320 # back to chunked encoding
1321 encode_chunked = False
1322 content_length = self._get_content_length(body, method)
1323 if content_length is None:
1324 if body is not None:
1325 if self.debuglevel > 0:
1326 print('Unable to determine size of %r' % body)
1327 encode_chunked = True
1328 self.putheader('Transfer-Encoding', 'chunked')
1329 else:
1330 self.putheader('Content-Length', str(content_length))
1331 else:
1332 encode_chunked = False
1333
1334 for hdr, value in headers.items():
1335 self.putheader(hdr, value)
1336 if isinstance(body, str):
1337 # RFC 2616 Section 3.7.1 says that text media types have a
1338 # default charset of iso-8859-1.
1339 body = _encode(body, 'body')
1340 self.endheaders(body, encode_chunked=encode_chunked)
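# Sketch of how the chunking decision above plays out for an HTTP/1.1
# connection (paths and file names are placeholders):
#
#     conn.request("POST", "/upload", body=b"data")
#         # -> Content-Length: 4 is added automatically
#     conn.request("POST", "/upload", body=open("f.bin", "rb"))
#         # -> size unknown, so Transfer-Encoding: chunked is used
#     conn.request("POST", "/upload", body=b"data",
#                  headers={"Content-Length": "4"})
#         # -> the caller-supplied header is honored; nothing is added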
1341
1342 def getresponse(self):
1343 """Get the response from the server.
1344
1345 If the HTTPConnection is in the correct state, returns an
1346 instance of HTTPResponse or of whatever object is returned by
1347 the response_class variable.
1348
1349 If a request has not been sent or if a previous response has
1350 not been handled, ResponseNotReady is raised. If the HTTP
1351 response indicates that the connection should be closed, then
1352 it will be closed before the response is returned. When the
1353 connection is closed, the underlying socket is closed.
1354 """
1355
1356 # if a prior response has been completed, then forget about it.
1357 if self.__response and self.__response.isclosed():
1358 self.__response = None
1359
1360 # if a prior response exists, then it must be completed (otherwise, we
1361 # cannot read this response's header to determine the connection-close
1362 # behavior)
1363 #
1364 # note: if a prior response existed, but was connection-close, then the
1365 # socket and response were made independent of this HTTPConnection
1366 # object since a new request requires that we open a whole new
1367 # connection
1368 #
1369 # this means the prior response had one of two states:
1370 # 1) will_close: this connection was reset and the prior socket and
1371 # response operate independently
1372 # 2) persistent: the response was retained and we await its
1373 # isclosed() status to become true.
1374 #
1375 if self.__state != _CS_REQ_SENT or self.__response:
1376 raise ResponseNotReady(self.__state)
1377
1378 if self.debuglevel > 0:
1379 response = self.response_class(self.sock, self.debuglevel,
1380 method=self._method)
1381 else:
1382 response = self.response_class(self.sock, method=self._method)
1383
1384 try:
1385 try:
1386 response.begin()
1387 except ConnectionError:
1388 self.close()
1389 raise
1390 assert response.will_close != _UNKNOWN
1391 self.__state = _CS_IDLE
1392
1393 if response.will_close:
1394 # this effectively passes the connection to the response
1395 self.close()
1396 else:
1397 # remember this, so we can tell when it is complete
1398 self.__response = response
1399
1400 return response
1401 except:
1402 response.close()
1403 raise
1404
1405 try:
1406 import ssl
1407 except ImportError:
1408 pass
1409 else:
1410 class HTTPSConnection(HTTPConnection):
1411 "This class allows communication via SSL."
1412
1413 default_port = HTTPS_PORT
1414
1415 # XXX Should key_file and cert_file be deprecated in favour of context?
1416
1417 def __init__(self, host, port=None, key_file=None, cert_file=None,
1418 timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
1419 source_address=None, *, context=None,
1420 check_hostname=None, blocksize=8192):
1421 super(HTTPSConnection, self).__init__(host, port, timeout,
1422 source_address,
1423 blocksize=blocksize)
1424 if (key_file is not None or cert_file is not None or
1425 check_hostname is not None):
1426 import warnings
1427 warnings.warn("key_file, cert_file and check_hostname are "
1428 "deprecated, use a custom context instead.",
1429 DeprecationWarning, 2)
1430 self.key_file = key_file
1431 self.cert_file = cert_file
1432 if context is None:
1433 context = ssl._create_default_https_context()
1434 # send ALPN extension to indicate HTTP/1.1 protocol
1435 if self._http_vsn == 11:
1436 context.set_alpn_protocols(['http/1.1'])
1437 # enable PHA for TLS 1.3 connections if available
1438 if context.post_handshake_auth is not None:
1439 context.post_handshake_auth = True
1440 will_verify = context.verify_mode != ssl.CERT_NONE
1441 if check_hostname is None:
1442 check_hostname = context.check_hostname
1443 if check_hostname and not will_verify:
1444 raise ValueError("check_hostname needs an SSL context with "
1445 "either CERT_OPTIONAL or CERT_REQUIRED")
1446 if key_file or cert_file:
1447 context.load_cert_chain(cert_file, key_file)
1448 # cert and key file means the user wants to authenticate.
1449 # enable TLS 1.3 PHA implicitly even for custom contexts.
1450 if context.post_handshake_auth is not None:
1451 context.post_handshake_auth = True
1452 self._context = context
1453 if check_hostname is not None:
1454 self._context.check_hostname = check_hostname
1455
1456 def connect(self):
1457 "Connect to a host on a given (SSL) port."
1458
1459 super().connect()
1460
1461 if self._tunnel_host:
1462 server_hostname = self._tunnel_host
1463 else:
1464 server_hostname = self.host
1465
1466 self.sock = self._context.wrap_socket(self.sock,
1467 server_hostname=server_hostname)
1468
1469 __all__.append("HTTPSConnection")
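# Illustrative HTTPS usage (a sketch; "www.python.org" is a placeholder host
# and certificate verification uses the default context):
#
#     conn = HTTPSConnection("www.python.org")
#     conn.request("GET", "/")
#     resp = conn.getresponse()
#     print(resp.status, resp.reason)
#     data = resp.read()
#     conn.close()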
1470
1471 class HTTPException(Exception):
1472 # Subclasses that define an __init__ must call Exception.__init__
1473 # or define self.args. Otherwise, str() will fail.
1474 pass
1475
1476 class NotConnected(HTTPException):
1477 pass
1478
1479 class InvalidURL(HTTPException):
1480 pass
1481
1482 class UnknownProtocol(HTTPException):
1483 def __init__(self, version):
1484 self.args = version,
1485 self.version = version
1486
1487 class UnknownTransferEncoding(HTTPException):
1488 pass
1489
1490 class UnimplementedFileMode(HTTPException):
1491 pass
1492
1493 class IncompleteRead(HTTPException):
1494 def __init__(self, partial, expected=None):
1495 self.args = partial,
1496 self.partial = partial
1497 self.expected = expected
1498 def __repr__(self):
1499 if self.expected is not None:
1500 e = ', %i more expected' % self.expected
1501 else:
1502 e = ''
1503 return '%s(%i bytes read%s)' % (self.__class__.__name__,
1504 len(self.partial), e)
1505 __str__ = object.__str__
1506
1507 class ImproperConnectionState(HTTPException):
1508 pass
1509
1510 class CannotSendRequest(ImproperConnectionState):
1511 pass
1512
1513 class CannotSendHeader(ImproperConnectionState):
1514 pass
1515
1516 class ResponseNotReady(ImproperConnectionState):
1517 pass
1518
1519 class BadStatusLine(HTTPException):
1520 def __init__(self, line):
1521 if not line:
1522 line = repr(line)
1523 self.args = line,
1524 self.line = line
1525
1526 class LineTooLong(HTTPException):
1527 def __init__(self, line_type):
1528 HTTPException.__init__(self, "got more than %d bytes when reading %s"
1529 % (_MAXLINE, line_type))
1530
1531 class RemoteDisconnected(ConnectionResetError, BadStatusLine):
1532 def __init__(self, *pos, **kw):
1533 BadStatusLine.__init__(self, "")
1534 ConnectionResetError.__init__(self, *pos, **kw)
1535
1536 # for backwards compatibility
1537 error = HTTPException