Fix feeling lucky (#1130)
* Fix feeling lucky: fall through to displaying results if it doesn't work
* Allow the lucky bang anywhere in the query
* Update the feeling lucky test
parent fd20135af0
commit f18bf07ac3
@@ -144,12 +144,26 @@ def get_first_link(soup: BeautifulSoup) -> str:
         str: A str link to the first result
 
     """
+    first_link = ''
+    orig_details = []
+
+    # Temporarily remove details so we don't grab those links
+    for details in soup.find_all('details'):
+        temp_details = soup.new_tag('removed_details')
+        orig_details.append(details.replace_with(temp_details))
+
     # Replace hrefs with only the intended destination (no "utm" type tags)
     for a in soup.find_all('a', href=True):
         # Return the first search result URL
-        if 'url?q=' in a['href']:
-            return filter_link_args(a['href'])
-    return ''
+        if a['href'].startswith('http://') or a['href'].startswith('https://'):
+            first_link = a['href']
+            break
+
+    # Add the details back
+    for orig_detail, details in zip(orig_details, soup.find_all('removed_details')):
+        details.replace_with(orig_detail)
+
+    return first_link
 
 
 def get_site_alt(link: str, site_alts: dict = SITE_ALTS) -> str:
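For reference, a self-contained sketch of the technique the reworked get_first_link() uses above: <details> elements are swapped for placeholder tags so their links are skipped, the first absolute http(s) link is captured, and the details are put back. The HTML snippet and example URLs here are illustrative, not Whoogle's actual result markup.

from bs4 import BeautifulSoup

# Hypothetical results page: the first anchor lives inside a <details>
# block (e.g. a collapsed section) and must be skipped.
html = """
<div>
  <details><a href="https://example.com/ignored">hidden</a></details>
  <a href="#fragment">skip relative links</a>
  <a href="https://www.wikipedia.org">first real result</a>
</div>
"""
soup = BeautifulSoup(html, 'html.parser')

# Temporarily replace each <details> element with a placeholder tag
orig_details = []
for details in soup.find_all('details'):
    placeholder = soup.new_tag('removed_details')
    orig_details.append(details.replace_with(placeholder))

# Grab the first absolute link that survives the filtering
first_link = ''
for a in soup.find_all('a', href=True):
    if a['href'].startswith(('http://', 'https://')):
        first_link = a['href']
        break

# Restore the removed <details> elements
for original, placeholder in zip(orig_details, soup.find_all('removed_details')):
    placeholder.replace_with(original)

print(first_link)  # -> https://www.wikipedia.org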
@@ -102,9 +102,15 @@ class Search:
         except InvalidToken:
             pass
 
-        # Strip leading '! ' for "feeling lucky" queries
-        self.feeling_lucky = q.startswith('! ')
-        self.query = q[2:] if self.feeling_lucky else q
+        # Strip '!' for "feeling lucky" queries
+        if match := re.search("(^|\s)!($|\s)", q):
+            self.feeling_lucky = True
+            start, end = match.span()
+            self.query = " ".join([seg for seg in [q[:start], q[end:]] if seg])
+        else:
+            self.feeling_lucky = False
+            self.query = q
 
         # Check for possible widgets
         self.widget = "ip" if re.search("([^a-z0-9]|^)my *[^a-z0-9] *(ip|internet protocol)" +
                                         "($|( *[^a-z0-9] *(((addres|address|adres|" +
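To illustrate the new bang handling above: the regex matches a standalone '!' at the start, end, or middle of the query, and the match span is used to stitch the remaining segments back together. A minimal sketch with made-up queries (the helper name is just for the example):

import re

def parse_lucky(q):
    # Match a standalone '!' at the start, end, or between spaces
    if match := re.search(r"(^|\s)!($|\s)", q):
        start, end = match.span()
        query = " ".join([seg for seg in [q[:start], q[end:]] if seg])
        return True, query
    return False, q

print(parse_lucky("! wikipedia"))  # (True, 'wikipedia')
print(parse_lucky("github !"))     # (True, 'github')
print(parse_lucky("foo ! bar"))    # (True, 'foo bar')
print(parse_lucky("why!not"))      # (False, 'why!not') -- '!' must stand alone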
@@ -161,10 +167,13 @@ class Search:
         if g.user_request.tor_valid:
             html_soup.insert(0, bsoup(TOR_BANNER, 'html.parser'))
 
-        if self.feeling_lucky:
-            return get_first_link(html_soup)
-        else:
-            formatted_results = content_filter.clean(html_soup)
+        formatted_results = content_filter.clean(html_soup)
+        if self.feeling_lucky:
+            if lucky_link := get_first_link(formatted_results):
+                return lucky_link
+
+            # Fall through to regular search if unable to find link
+            self.feeling_lucky = False
 
         # Append user config to all search links, if available
         param_str = ''.join('&{}={}'.format(k, v)
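The test changes below check for a 303 redirect whose Location header is the lucky link. As a rough, self-contained sketch of that mechanic (the route and helper here are hypothetical stand-ins, not Whoogle's actual routing code), a Flask view can return the link as a 303 redirect and fall back to serving the rendered page when no link was found:

from flask import Flask, redirect

app = Flask(__name__)

def lucky_or_results():
    # Hypothetical stand-in for Search.generate_response(): returns either
    # (first result URL, True) or (rendered results HTML, False).
    first_link = 'https://www.wikipedia.org'  # pretend get_first_link() found this
    if first_link:
        return first_link, True
    return '<html>full results page</html>', False

@app.route('/search')
def search_route():
    response, feeling_lucky = lucky_or_results()
    if feeling_lucky:
        # 303 "See Other" sends the client straight to the first result,
        # which is what test_feeling_lucky asserts via the Location header.
        return redirect(response, code=303)
    return response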
@@ -17,8 +17,15 @@ def test_search(client):
 
 
 def test_feeling_lucky(client):
-    rv = client.get(f'/{Endpoint.search}?q=!%20test')
+    # Bang at beginning of query
+    rv = client.get(f'/{Endpoint.search}?q=!%20wikipedia')
     assert rv._status_code == 303
+    assert rv.headers.get('Location').startswith('https://www.wikipedia.org')
+
+    # Move bang to end of query
+    rv = client.get(f'/{Endpoint.search}?q=github%20!')
+    assert rv._status_code == 303
+    assert rv.headers.get('Location').startswith('https://github.com')
 
 
 def test_ddg_bang(client):