from app import rhyme
from io import BytesIO
import pycurl
import urllib.parse as urlparse

# Base search url
SEARCH_URL = 'https://www.google.com/search?gbv=1&q='

MOBILE_UA = '{}/5.0 (Android 0; Mobile; rv:54.0) Gecko/54.0 {}/59.0'
DESKTOP_UA = '{}/5.0 (X11; {} x86_64; rv:75.0) Gecko/20100101 {}/75.0'

# Valid query params
VALID_PARAMS = ['tbs', 'tbm', 'start', 'near']


def gen_user_agent(normal_ua):
    # Pick a mobile or desktop template based on the client's real user agent,
    # then fill it with rhyme-based stand-ins for 'Mozilla', 'Firefox' and 'Linux'
    is_mobile = 'Android' in normal_ua or 'iPhone' in normal_ua

    mozilla = rhyme.get_rhyme('Mo') + rhyme.get_rhyme('zilla')
    firefox = rhyme.get_rhyme('Fire') + rhyme.get_rhyme('fox')
    linux = rhyme.get_rhyme('Lin') + 'ux'

    if is_mobile:
        return MOBILE_UA.format(mozilla, firefox)
    else:
        return DESKTOP_UA.format(mozilla, linux, firefox)


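# Illustrative only: the actual substitutions come from app.rhyme's wordlist,
# so the rhymed words below are hypothetical placeholders.
#
#   gen_user_agent('Mozilla/5.0 (X11; Linux x86_64; rv:75.0) ...')
#   might return something like:
#   'Bozilla/5.0 (X11; Binux x86_64; rv:75.0) Gecko/20100101 Hirefox/75.0'

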
def gen_query(query, args, near_city=None):
    param_dict = {key: '' for key in VALID_PARAMS}

    # Use :past(hour/day/week/month/year) if available
    # Example search: "new restaurants :past month"
    if ':past' in query:
        time_range = query.split(':past', 1)[-1].strip()
        param_dict['tbs'] = '&tbs=qdr:' + time_range[0].lower()

    # Ensure search query is parsable
    query = urlparse.quote(query)

    # Pass along type of results (news, images, books, etc)
    if 'tbm' in args:
        param_dict['tbm'] = '&tbm=' + args.get('tbm')

    # Get results page start value (10 per page, ie page 2 start val = 20)
    if 'start' in args:
        param_dict['start'] = '&start=' + args.get('start')

    # Search for results near a particular city, if available
    if near_city is not None:
        param_dict['near'] = '&near=' + urlparse.quote(near_city)

    # Append all non-empty params to the encoded query
    for val in param_dict.values():
        if not val:
            continue
        query += val

    return query


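# Illustrative only: 'args' stands in for any dict-like object with a .get()
# method (e.g. Flask's request.args in the app's own routes).
#
#   gen_query('new restaurants :past month', {'tbm': 'nws'})
#   -> 'new%20restaurants%20%3Apast%20month&tbs=qdr:m&tbm=nws'

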
class Request:
    def __init__(self, normal_ua):
        self.modified_user_agent = gen_user_agent(normal_ua)

    def __getitem__(self, name):
        return getattr(self, name)

    def send(self, base_url=SEARCH_URL, query='', return_bytes=False):
        # Response headers are collected one line at a time (as bytes)
        # via the HEADERFUNCTION callback below
        response_header = []

        b_obj = BytesIO()
        crl = pycurl.Curl()
        crl.setopt(crl.URL, base_url + query)
        crl.setopt(crl.USERAGENT, self.modified_user_agent)
        crl.setopt(crl.WRITEDATA, b_obj)
        crl.setopt(crl.HEADERFUNCTION, response_header.append)
        crl.setopt(pycurl.FOLLOWLOCATION, 1)
        crl.perform()
        crl.close()

        if return_bytes:
            return b_obj.getvalue()
        else:
            return b_obj.getvalue().decode('unicode-escape', 'ignore')
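
# Usage sketch (hypothetical values; a caller would normally pass along the
# client's real User-Agent header):
#
#   req = Request('Mozilla/5.0 (X11; Linux x86_64; rv:75.0) Gecko/20100101 Firefox/75.0')
#   results_html = req.send(query=gen_query('privacy', {}))
#   raw_bytes = req.send(base_url='https://example.com', return_bytes=True)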