1#!/usr/bin/env python
2
3"""
4Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
5See the file 'LICENSE' for copying permission
6"""
7
8import re
9import socket
10
11from lib.core.common import getSafeExString
12from lib.core.common import popValue
13from lib.core.common import pushValue
14from lib.core.common import readInput
15from lib.core.common import urlencode
16from lib.core.convert import getUnicode
17from lib.core.data import conf
18from lib.core.data import kb
19from lib.core.data import logger
20from lib.core.decorators import stackedmethod
21from lib.core.enums import CUSTOM_LOGGING
22from lib.core.enums import HTTP_HEADER
23from lib.core.enums import REDIRECTION
24from lib.core.exception import SqlmapBaseException
25from lib.core.exception import SqlmapConnectionException
26from lib.core.exception import SqlmapUserQuitException
27from lib.core.settings import BING_REGEX
28from lib.core.settings import DUCKDUCKGO_REGEX
29from lib.core.settings import DUMMY_SEARCH_USER_AGENT
30from lib.core.settings import GOOGLE_REGEX
31from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
32from lib.core.settings import UNICODE_ENCODING
33from lib.request.basic import decodePage
34from thirdparty.six.moves import http_client as _http_client
35from thirdparty.six.moves import urllib as _urllib
36from thirdparty.socks import socks
37
def _search(dork):
    """
    Performs the effective search for the given dork expression.

    Google is queried first; if no usable links are found the user is
    offered a retry against DuckDuckGo (default) or Bing, or may quit.

    Returns a list of result URLs (possibly empty), or None when an
    error page could not be read. Raises SqlmapConnectionException on
    connection failures, SqlmapBaseException when a proxy list is in
    use and the search engine blocks the current IP address, and
    SqlmapUserQuitException when the user chooses to quit.
    """

    if not dork:
        return None

    page = None
    data = None
    requestHeaders = {}
    responseHeaders = {}

    # Reuse the configured User-Agent when present, otherwise fall back to
    # a dummy search agent (search engines reject the default urllib one)
    requestHeaders[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    requestHeaders[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    try:
        # "ncr" (no country redirect) pins subsequent queries to google.com
        req = _urllib.request.Request("https://www.google.com/ncr", headers=requestHeaders)
        conn = _urllib.request.urlopen(req)
    except Exception as ex:
        errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
        raise SqlmapConnectionException(errMsg)

    gpage = conf.googlePage if conf.googlePage > 1 else 1
    logger.info("using search result page #%d" % gpage)

    url = "https://www.google.com/search?"
    url += "q=%s&" % urlencode(dork, convall=True)
    url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
    url += "&start=%d" % ((gpage - 1) * 100)  # 100 results per page

    try:
        req = _urllib.request.Request(url, headers=requestHeaders)
        conn = _urllib.request.urlopen(req)

        requestMsg = "HTTP request:\nGET %s" % url
        requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        page = conn.read()
        code = conn.code
        status = conn.msg
        responseHeaders = conn.info()

        responseMsg = "HTTP response (%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
    except _urllib.error.HTTPError as ex:
        # Error pages (e.g. CAPTCHA) still carry useful content
        try:
            page = ex.read()
            responseHeaders = ex.info()
        except Exception as _:
            warnMsg = "problem occurred while trying to get "
            warnMsg += "an error page information (%s)" % getSafeExString(_)
            logger.critical(warnMsg)
            return None
    except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
        errMsg = "unable to connect to Google"
        raise SqlmapConnectionException(errMsg)

    page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

    retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]

    if not retVal and "detected unusual traffic" in page:
        warnMsg = "Google has detected 'unusual' traffic from "
        warnMsg += "used IP address disabling further searches"

        if conf.proxyList:
            # With a proxy list available the caller can switch proxies
            raise SqlmapBaseException(warnMsg)
        else:
            logger.critical(warnMsg)

    if not retVal:
        message = "no usable links found. What do you want to do?"
        message += "\n[1] (re)try with DuckDuckGo (default)"
        message += "\n[2] (re)try with Bing"
        message += "\n[3] quit"
        choice = readInput(message, default='1')

        if choice == '3':
            raise SqlmapUserQuitException
        elif choice == '2':
            # Bing paginates 10 results per page
            url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(dork, convall=True), (gpage - 1) * 10 + 1)
            regex = BING_REGEX
        else:
            # DuckDuckGo expects a POST; 30 results per page
            url = "https://duckduckgo.com/html/"
            data = "q=%s&s=%d" % (urlencode(dork, convall=True), (gpage - 1) * 30)
            regex = DUCKDUCKGO_REGEX

        try:
            # Note: urllib requires bytes for the request body (Python 3)
            req = _urllib.request.Request(url, data=data.encode(UNICODE_ENCODING) if data else None, headers=requestHeaders)
            conn = _urllib.request.urlopen(req)

            requestMsg = "HTTP request:\n%s %s" % ("POST" if data else "GET", url)
            requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except _urllib.error.HTTPError as ex:
            try:
                page = ex.read()
                page = decodePage(page, ex.headers.get(HTTP_HEADER.CONTENT_ENCODING), ex.headers.get(HTTP_HEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % ex.code
                logger.critical(warnMsg)
                return None
        except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
            errMsg = "unable to connect"
            raise SqlmapConnectionException(errMsg)

        retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]

        if not retVal and "issue with the Tor Exit Node you are currently using" in page:
            warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
            warnMsg += "used (Tor) IP address"

            if conf.proxyList:
                raise SqlmapBaseException(warnMsg)
            else:
                logger.critical(warnMsg)

    return retVal
181
@stackedmethod
def search(dork):
    """
    Public entry point for dork searching.

    Temporarily forces automatic following of redirections, delegates to
    _search(), and — when a proxy list is configured — reacts to search
    engine blocking by switching to another proxy and retrying.
    """

    pushValue(kb.redirectChoice)
    kb.redirectChoice = REDIRECTION.YES

    try:
        return _search(dork)
    except SqlmapBaseException as ex:
        # Without a proxy list there is nothing to fall back to
        if not conf.proxyList:
            raise

        logger.critical(getSafeExString(ex))
        logger.warn("changing proxy")

        # Dropping the current proxy makes setHTTPHandlers() pick a new one
        conf.proxy = None
        setHTTPHandlers()

        return search(dork)
    finally:
        # Always restore the caller's redirection preference
        kb.redirectChoice = popValue()
204
def setHTTPHandlers():  # Cross-referenced function
    # Placeholder raising by default; presumably rebound at runtime to the
    # real implementation elsewhere in the project (see "Cross-referenced"
    # marker) — TODO confirm against the module that performs the rebinding
    raise NotImplementedError
207