Added image proxying, refactored filter class

Images were previously fetched directly from Google search results,
which was a potential privacy hazard. All image sources are now rewritten
to pass through shoogle's routing first, which then fetches the raw
image data and passes it through to the user.
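
For illustration, the rewrite applied to each result image looks roughly
like this (a minimal sketch of the sync_images logic in the diff below;
the gstatic URL is made up):

    # Hypothetical protocol-relative src taken from a results page:
    src = '//www.gstatic.com/images/example.png'
    if src.startswith('//'):
        src = 'https:' + src
    # Route the fetch through the app instead of the user's browser:
    src = '/tmp?image_url=' + src
    # -> '/tmp?image_url=https://www.gstatic.com/images/example.png'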

The Filter class was refactored to split the primary clean method into
smaller, more manageable submethods.
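
The resulting structure of clean (skeleton only, condensed from the diff
below):

    def clean(self, soup):
        def remove_ads(): ...      # drop sponsored result divs
        def sync_images(): ...     # rewrite img srcs through /tmp
        def update_styling(): ...  # buttons, logo, mobile fixes, dark mode
        def update_links(): ...    # strip tracking params, add nojs links

        remove_ads()
        sync_images()
        update_styling()
        update_links()
        return soup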
Ben Busby 2020-04-27 20:21:36 -06:00
parent b0e6167733
commit 4180aedd87
3 changed files with 106 additions and 67 deletions

app/filter.py

@@ -30,9 +30,11 @@ class Filter:
         return page

     def clean(self, soup):
-        # Remove all ads
-        main_divs = soup.find('div', {'id': 'main'})
-        if main_divs is not None:
+        def remove_ads():
+            main_divs = soup.find('div', {'id': 'main'})
+            if main_divs is None:
+                return
+
             result_divs = main_divs.findAll('div', recursive=False)

             # Only ads/sponsored content use classes in the list of result divs
@@ -40,6 +42,14 @@ class Filter:
             for div in ad_divs:
                 div.decompose()

-        # Remove unnecessary button(s)
-        for button in soup.find_all('button'):
-            button.decompose()
+        def sync_images():
+            for img in soup.find_all('img'):
+                if img['src'].startswith('//'):
+                    img['src'] = 'https:' + img['src']
+
+                img['src'] = '/tmp?image_url=' + img['src']
+
+        def update_styling():
+            # Remove unnecessary button(s)
+            for button in soup.find_all('button'):
+                button.decompose()
@@ -51,7 +61,8 @@ class Filter:
             # Update logo
             logo = soup.find('a', {'class': 'l'})
             if logo and self.mobile:
-                logo['style'] = 'display:flex; justify-content:center; align-items:center; color:#685e79; font-size:18px;'
+                logo['style'] = 'display:flex; justify-content:center; align-items:center; color:#685e79; ' \
+                                'font-size:18px; '

             # Fix search bar length on mobile
             try:
@@ -60,6 +71,13 @@ class Filter:
             except AttributeError:
                 pass

+            # Set up dark mode if active
+            if self.dark:
+                soup.find('html')['style'] = 'scrollbar-color: #333 #111;'
+                for input_element in soup.findAll('input'):
+                    input_element['style'] = 'color:#fff;'
+
+        def update_links():
             # Replace hrefs with only the intended destination (no "utm" type tags)
             for a in soup.find_all('a', href=True):
                 href = a['href']
@@ -100,12 +118,6 @@ class Filter:
                 a.append(BeautifulSoup('<br><hr><br>', 'html.parser'))
                 a.append(nojs_link)

-        # Set up dark mode if active
-        if self.dark:
-            soup.find('html')['style'] = 'scrollbar-color: #333 #111;'
-            for input_element in soup.findAll('input'):
-                input_element['style'] = 'color:#fff;'
-
         # Ensure no extra scripts passed through
         try:
             for script in soup('script'):
@@ -114,4 +126,8 @@ class Filter:
         except Exception:
             pass

+        remove_ads()
+        sync_images()
+        update_styling()
+        update_links()
         return soup

app/request.py

@@ -1,5 +1,4 @@
 from app import rhyme
-from app.filter import Filter
 from io import BytesIO
 import pycurl
 import urllib.parse as urlparse
@@ -60,7 +59,7 @@ class Request:
     def __getitem__(self, name):
         return getattr(self, name)

-    def send(self, base_url=SEARCH_URL, query=''):
+    def send(self, base_url=SEARCH_URL, query='', return_bytes=False):
         response_header = []
         b_obj = BytesIO()
@@ -73,4 +72,7 @@ class Request:
         crl.perform()
         crl.close()

-        return b_obj.getvalue().decode('utf-8', 'ignore')
+        if return_bytes:
+            return b_obj.getvalue()
+        else:
+            return b_obj.getvalue().decode('utf-8', 'ignore')
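
Usage of the new flag, as a minimal sketch (the user-agent string and
image URL are made up; Request is constructed the same way as in
before_request_func below):

    req = Request('Mozilla/5.0')
    # Raw bytes, suitable for proxying binary data such as images:
    raw = req.send(base_url='https://example.com/img.png', return_bytes=True)
    # Default behaviour is unchanged: a utf-8 decoded str
    html = req.send(query='example+query')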

app/routes.py

@@ -2,7 +2,8 @@ from app import app
 from app.filter import Filter
 from app.request import Request, gen_query
 from bs4 import BeautifulSoup
-from flask import g, make_response, request, redirect, render_template
+from flask import g, make_response, request, redirect, render_template, send_file
+import io
 import json
 import os
 import urllib.parse as urlparse
@@ -18,6 +19,11 @@ def before_request_func():
     g.user_request = Request(request.headers.get('User-Agent'))


+# @app.after_request
+# def after_request(response):
+#     return response
+
+
 @app.route('/', methods=['GET'])
 def index():
     bg = '#000' if 'dark' in user_config and user_config['dark'] else '#fff'
@@ -87,6 +93,21 @@ def imgres():
     return redirect(request.args.get('imgurl'))


+@app.route('/tmp')
+def tmp():
+    file_data = g.user_request.send(base_url=request.args.get('image_url'), return_bytes=True)
+    tmp_mem = io.BytesIO()
+    tmp_mem.write(file_data)
+    tmp_mem.seek(0)
+
+    return send_file(
+        tmp_mem,
+        as_attachment=True,
+        attachment_filename='tmp.png',
+        mimetype='image/png'
+    )
+
+
 @app.route('/window')
 def window():
     get_body = g.user_request.send(base_url=request.args.get('location'))
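
With these routes in place, a proxied image can be requested directly
(a sketch, assuming a local instance on Flask's default port 5000 and
the requests library; the image URL is made up):

    import requests

    resp = requests.get(
        'http://localhost:5000/tmp',
        params={'image_url': 'https://www.gstatic.com/images/example.png'})
    # Note: the route always reports image/png, whatever the source type
    print(resp.headers['Content-Type'])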