Added "I'm feeling lucky" function (#46)
* Putting '! ' at the beginning of the query now redirects to the first search result Signed-off-by: Paul Rothrock <paul@movetoiceland.com> * Moved get_first_url outside of filter class Signed-off-by: Paul Rothrock <paul@movetoiceland.com>main
parent
56bf976ecd
commit
0e39b8f97b
|
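For context, the user-facing behavior is easy to exercise against a running instance; a minimal sketch follows (the localhost URL and port are assumptions, not part of this commit):

    import requests

    # Send a query prefixed with '! ' and do not follow redirects, so the
    # 303 response and its Location header are visible directly.
    resp = requests.get('http://localhost:5000/search',
                        params={'q': '! whoogle search'},
                        allow_redirects=False)
    print(resp.status_code)               # expected: 303
    print(resp.headers.get('Location'))   # URL of the first search result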
app/filter.py

@@ -13,6 +13,40 @@ BLANK_B64 = '''
 data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAD0lEQVR42mNkwAIYh7IgAAVVAAuInjI5AAAAAElFTkSuQmCC
 '''


+def get_first_link(soup):
+    # Replace hrefs with only the intended destination (no "utm" type tags)
+    for a in soup.find_all('a', href=True):
+        href = a['href'].replace('https://www.google.com', '')
+
+        result_link = urlparse.urlparse(href)
+        query_link = parse_qs(result_link.query)['q'][0] if '?q=' in href else ''
+
+        # Return the first search result URL
+        if 'url?q=' in href:
+            return filter_link_args(href)
+
+
+def filter_link_args(query_link):
+    parsed_link = urlparse.urlparse(query_link)
+    link_args = parse_qs(parsed_link.query)
+    safe_args = {}
+
+    if len(link_args) == 0 and len(parsed_link) > 0:
+        return query_link
+
+    for arg in link_args.keys():
+        if arg in SKIP_ARGS:
+            continue
+
+        safe_args[arg] = link_args[arg]
+
+    # Remove original link query and replace with filtered args
+    query_link = query_link.replace(parsed_link.query, '')
+    if len(safe_args) > 0:
+        query_link = query_link + urlparse.urlencode(safe_args, doseq=True)
+    else:
+        query_link = query_link.replace('?', '')
+
+    return query_link
+
+
 class Filter:
     def __init__(self, mobile=False, config=None, secret_key=''):
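To make the intent of filter_link_args concrete, here is a small standalone sketch of the same idea, dropping any query argument whose name appears in a skip list; the helper name, the SKIP_ARGS values, and the urlencode-based rebuild are illustrative assumptions, not the code above verbatim:

    import urllib.parse as urlparse
    from urllib.parse import parse_qs

    # Illustrative stand-in; the real SKIP_ARGS is defined elsewhere in the module.
    SKIP_ARGS = ['utm_source', 'utm_medium', 'utm_campaign']

    def strip_tracking_args(link):
        # Keep only the query arguments that are not in the skip list,
        # then rebuild the URL with the filtered query string.
        parsed = urlparse.urlparse(link)
        kept = {k: v for k, v in parse_qs(parsed.query).items() if k not in SKIP_ARGS}
        return parsed._replace(query=urlparse.urlencode(kept, doseq=True)).geturl()

    print(strip_tracking_args('https://example.com/article?utm_source=google&id=42'))
    # -> https://example.com/article?id=42

The real function additionally drops the '?' entirely when nothing survives the filter, and returns the link untouched when it carried no query arguments at all.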
@@ -149,27 +183,7 @@ class Filter:
                 a['href'] = new_search
             elif 'url?q=' in href:
                 # Strip unneeded arguments
-                parsed_link = urlparse.urlparse(query_link)
-                link_args = parse_qs(parsed_link.query)
-                safe_args = {}
-
-                if len(link_args) == 0 and len(parsed_link) > 0:
-                    a['href'] = query_link
-                    continue
-
-                for arg in link_args.keys():
-                    if arg in SKIP_ARGS:
-                        continue
-
-                    safe_args[arg] = link_args[arg]
-
-                # Remove original link query and replace with filtered args
-                query_link = query_link.replace(parsed_link.query, '')
-                if len(safe_args) > 0:
-                    query_link = query_link + urlparse.urlencode(safe_args, doseq=True)
-                else:
-                    query_link = query_link.replace('?', '')
-
+                query_link = filter_link_args(query_link)
                 a['href'] = query_link

             # Add no-js option
@@ -185,4 +199,4 @@ def gen_nojs(soup, link, sibling):
     nojs_link['style'] = 'display:block;width:100%;'
     nojs_link.string = 'NoJS Link: ' + nojs_link['href']
     sibling.append(BeautifulSoup('<br><hr><br>', 'html.parser'))
-    sibling.append(nojs_link)
+    sibling.append(nojs_link)
app/routes.py

@@ -1,5 +1,5 @@
 from app import app
-from app.filter import Filter
+from app.filter import Filter, get_first_link
 from app.models.config import Config
 from app.request import Request, gen_query
 import argparse
@@ -72,7 +72,7 @@ def opensearch():
 def search():
     request_params = request.args if request.method == 'GET' else request.form
     q = request_params.get('q')

     if q is None or len(q) == 0:
         return redirect('/')
     else:
@@ -82,6 +82,11 @@ def search():
         except InvalidToken:
             pass

+    feeling_lucky = q.startswith('! ')
+
+    if feeling_lucky: # Well do you, punk?
+        q = q[2:]
+
     user_agent = request.headers.get('User-Agent')
     mobile = 'Android' in user_agent or 'iPhone' in user_agent
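The prefix handling above is easy to trace in isolation; a tiny sketch with example values:

    # Minimal illustration of the '! ' handling added above (query text is an example).
    q = '! python flask'
    feeling_lucky = q.startswith('! ')
    if feeling_lucky:
        q = q[2:]  # drop the two-character "! " marker before the query is sent upstream
    assert feeling_lucky is True
    assert q == 'python flask'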
@@ -90,7 +95,15 @@ def search():
     get_body = g.user_request.send(query=full_query)

     results = content_filter.reskin(get_body)
-    formatted_results = content_filter.clean(BeautifulSoup(results, 'html.parser'))
+    dirty_soup = BeautifulSoup(results, 'html.parser')
+
+    if feeling_lucky:
+        redirect_url = get_first_link(dirty_soup)
+        return redirect(redirect_url, 303)  # Using 303 so the browser performs a GET request for the URL
+    else:
+        formatted_results = content_filter.clean(dirty_soup)

     return render_template('display.html', query=urlparse.unquote(q), response=formatted_results)
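The 303 status is the interesting detail here: "See Other" tells the client to follow the Location with a GET even when the original /search request was a POST. A minimal sketch of that behavior in isolation (the demo route name and target URL are assumptions for illustration only):

    from flask import Flask, redirect

    app = Flask(__name__)

    @app.route('/lucky-demo', methods=['GET', 'POST'])
    def lucky_demo():
        first_result = 'https://example.com/first-result'  # placeholder target
        # 303 "See Other": the client re-requests the Location with GET,
        # which is why the commit uses it instead of the default 302.
        return redirect(first_result, 303)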
test/test_routes.py

@@ -17,6 +17,10 @@ def test_search(client):
     rv = client.get('/search?q=test')
     assert rv._status_code == 200

+def test_feeling_lucky(client):
+    rv = client.get('/search?q=!%20test')
+    assert rv._status_code == 303
+


 def test_config(client):
     rv = client.post('/config', data=demo_config)
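The new test only asserts the status code; if one also wanted to check where the redirect points, a possible follow-up (not part of this commit; it reuses the same client fixture) might look like:

    def test_feeling_lucky_location(client):
        rv = client.get('/search?q=!%20test')
        assert rv._status_code == 303
        # The Location header should carry the first result's URL.
        assert rv.headers.get('Location')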