"""Python 2 / 3 compatibility shims.

Exposes one set of names that behave identically on both major Python
versions: the text/bytes type aliases ``u`` and ``b``, base64 helpers,
query-string / URL / cookie / config-parser imports, and a handful of
small WSGI ``environ`` accessors.
"""

# STRING_TYPES: every "native string-ish" type for isinstance() checks.
try:
    STRING_TYPES = (str, unicode)
except NameError: #pragma NO COVER Python >= 3.0
    STRING_TYPES = (str,)

# ``u`` is the text type and ``b`` is the byte-string type on either version.
try:
    u = unicode
except NameError: #pragma NO COVER Python >= 3.0
    u = str
    b = bytes
else: #pragma NO COVER Python < 3.0
    b = str

import base64

# Python 3 renamed base64.{en,de}codestring to {en,de}codebytes (and the
# old names were later removed entirely), and the new functions work on
# bytes only.  Provide text-in/text-out ``encodestring``/``decodestring``
# wrappers on Python 3 so callers can keep using native strings.
if 'decodebytes' in base64.__dict__: #pragma NO COVER Python >= 3.0
    decodebytes = base64.decodebytes
    encodebytes = base64.encodebytes

    def decodestring(value):
        """Decode a base64-encoded text string, returning text."""
        return base64.decodebytes(bytes(value, 'ascii')).decode('ascii')

    def encodestring(value):
        """Base64-encode a text string, returning text."""
        return base64.encodebytes(bytes(value, 'ascii')).decode('ascii')
else: #pragma NO COVER Python < 3.0
    decodebytes = base64.decodestring
    encodebytes = base64.encodestring
    decodestring = base64.decodestring
    encodestring = base64.encodestring

# Query-string parsing moved from ``cgi`` to ``urllib.parse`` in Python 3.
try:
    from urllib.parse import parse_qs
except ImportError: #pragma NO COVER Python < 3.0
    from cgi import parse_qs
    from cgi import parse_qsl
else: #pragma NO COVER Python >= 3.0
    from urllib.parse import parse_qsl

# ``ConfigParser`` (the Py2 module) vs ``configparser`` (Py3).  On Py2 we
# use SafeConfigParser, whose interpolation matches Py3's ConfigParser.
try:
    import ConfigParser
except ImportError: #pragma NO COVER Python >= 3.0
    from configparser import ConfigParser
    from configparser import ParsingError
else: #pragma NO COVER Python < 3.0
    from ConfigParser import SafeConfigParser as ConfigParser
    from ConfigParser import ParsingError

# ``Cookie`` (Py2) vs ``http.cookies`` (Py3).
try:
    from Cookie import SimpleCookie
except ImportError: #pragma NO COVER Python >= 3.0
    from http.cookies import SimpleCookie
    from http.cookies import CookieError
else: #pragma NO COVER Python < 3.0
    from Cookie import CookieError

# ``izip_longest`` was renamed ``zip_longest`` in Python 3.
try:
    from itertools import izip_longest
except ImportError: #pragma NO COVER Python >= 3.0
    from itertools import zip_longest as izip_longest

try:
    from StringIO import StringIO
except ImportError: #pragma NO COVER Python >= 3.0
    from io import StringIO

# URL helpers moved from ``urllib``/``urlparse`` into ``urllib.parse``.
try:
    from urllib import urlencode
except ImportError: #pragma NO COVER Python >= 3.0
    from urllib.parse import urlencode
    from urllib.parse import quote as url_quote
    from urllib.parse import unquote as url_unquote
else: #pragma NO COVER Python < 3.0
    from urllib import quote as url_quote
    from urllib import unquote as url_unquote

try:
    from urlparse import urlparse
except ImportError: #pragma NO COVER Python >= 3.0
    from urllib.parse import urlparse
    from urllib.parse import urlunparse
else: #pragma NO COVER Python < 3.0
    from urlparse import urlunparse

import wsgiref.util
import wsgiref.headers


def REQUEST_METHOD(environ):
    """Return the request method from a WSGI environ (KeyError if absent)."""
    return environ['REQUEST_METHOD']


def CONTENT_TYPE(environ):
    """Return the request content type, or '' when not supplied."""
    return environ.get('CONTENT_TYPE', '')


def USER_AGENT(environ):
    """Return the User-Agent header value, or None when not supplied."""
    return environ.get('HTTP_USER_AGENT')


def AUTHORIZATION(environ):
    """Return the Authorization header value, or '' when not supplied."""
    return environ.get('HTTP_AUTHORIZATION', '')


def get_cookies(environ):
    """Return a ``SimpleCookie`` parsed from the environ's Cookie header.

    The parsed jar is memoized in ``environ['paste.cookies']`` together
    with the raw header it was parsed from; the cache is reused only while
    the raw header is unchanged, so mutating HTTP_COOKIE invalidates it.
    """
    header = environ.get('HTTP_COOKIE', '')
    if 'paste.cookies' in environ:
        cookies, check_header = environ['paste.cookies']
        if check_header == header:
            return cookies
    cookies = SimpleCookie()
    try:
        cookies.load(header)
    except CookieError: #pragma NO COVER (can't see how to provoke this)
        # Malformed cookie data: fall through with whatever parsed cleanly.
        pass
    environ['paste.cookies'] = (cookies, header)
    return cookies


def construct_url(environ):
    """Reconstruct the full request URI from a WSGI environ."""
    return wsgiref.util.request_uri(environ)


def header_value(environ, key):
    """Return the value for header ``key``, or '' when absent.

    NOTE(review): despite the parameter name, ``wsgiref.headers.Headers``
    requires a *list* of (name, value) tuples and raises TypeError for a
    plain dict -- callers appear to pass a header list here, not a real
    WSGI environ dict; confirm against call sites.
    """
    headers = wsgiref.headers.Headers(environ)
    values = headers.get(key)
    if not values:
        return ""
    if isinstance(values, list): #pragma NO COVER can't be true under Py3k.
        return ",".join(values)
    else:
        return values


def must_decode(value):
    """Coerce ``value`` to text: decode bytes as UTF-8, else Latin-1.

    Text (and any non-bytes) input is returned unchanged.
    """
    if isinstance(value, b):
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError:
            # Not valid UTF-8: Latin-1 maps every byte, so this never fails.
            return value.decode('latin1')
    return value


def must_encode(value):
    """Coerce ``value`` to bytes: encode text as UTF-8.

    Bytes (and any non-text) input is returned unchanged.
    """
    if isinstance(value, u):
        return value.encode('utf-8')
    return value