2020-04-10 23:52:27 +03:00
|
|
|
from app import app, rhyme, filter
|
2020-01-23 09:19:17 +03:00
|
|
|
from bs4 import BeautifulSoup
|
2020-04-10 23:52:27 +03:00
|
|
|
from flask import request, redirect, render_template
|
|
|
|
from io import BytesIO
|
2020-04-05 04:30:53 +03:00
|
|
|
import json
|
2020-01-21 23:26:49 +03:00
|
|
|
import os
|
|
|
|
import pycurl
|
2020-02-22 02:52:29 +03:00
|
|
|
import urllib.parse as urlparse
|
2020-01-21 23:26:49 +03:00
|
|
|
|
2020-04-06 02:59:50 +03:00
|
|
|
# Absolute path of the directory containing this module.
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
# Bundled static assets live here; config.json (user preferences) is
# read from and written to this folder.
STATIC_FOLDER = os.path.join(APP_ROOT, 'static')
|
|
|
|
|
2020-04-05 04:30:53 +03:00
|
|
|
# Build the browser name fragments through the rhyme helper (the app relies
# on this indirection) and assemble the spoofed Firefox user-agent strings
# sent to Google in place of the client's real one.
mozilla = rhyme.get_rhyme('Mo') + 'zilla'
firefox = rhyme.get_rhyme('Fire') + 'fox'

MOBILE_UA = f'{mozilla}/5.0 (Android 4.20; Mobile; rv:54.0) Gecko/54.0 {firefox}/59.0'
DESKTOP_UA = f'{mozilla}/5.0 (Windows NT 6.1; Win64; x64; rv:59.0) Gecko/20100101 Mobile {firefox}/59.0'
|
|
|
|
|
|
|
|
# Base search url; gbv=1 asks Google for its basic HTML results page
SEARCH_URL = 'https://www.google.com/search?gbv=1&q='

# Load the persisted user preferences. A context manager closes the file
# handle promptly — the original `json.load(open(...))` leaked it.
with open(STATIC_FOLDER + '/config.json') as _config_file:
    user_config = json.load(_config_file)
|
2020-04-05 04:30:53 +03:00
|
|
|
|
2020-02-22 02:52:29 +03:00
|
|
|
|
|
|
|
def get_ua(user_agent):
    """Return the spoofed user agent matching the client's platform.

    :param user_agent: the client's own User-Agent header value; may be
        None when the header is absent (the original crashed on that).
    :return: MOBILE_UA for Android/iPhone clients, DESKTOP_UA otherwise.
    """
    ua = user_agent or ''  # guard: `'Android' in None` raises TypeError
    return MOBILE_UA if ('Android' in ua or 'iPhone' in ua) else DESKTOP_UA
|
|
|
|
|
|
|
|
|
2020-04-05 04:30:53 +03:00
|
|
|
def send_request(curl_url, ua):
    """Fetch *curl_url* via pycurl using user agent *ua*.

    Follows redirects and returns the response body decoded as UTF-8
    (undecodable bytes are dropped). Response headers are captured into a
    local list but currently discarded.

    :param curl_url: fully-formed URL to request
    :param ua: User-Agent string to send
    :return: response body as str
    """
    response_header = []
    b_obj = BytesIO()

    crl = pycurl.Curl()
    crl.setopt(crl.URL, curl_url)
    crl.setopt(crl.USERAGENT, ua)
    crl.setopt(crl.WRITEDATA, b_obj)
    crl.setopt(crl.HEADERFUNCTION, response_header.append)
    crl.setopt(pycurl.FOLLOWLOCATION, 1)

    # Ensure the handle is released even if perform() raises
    # (the original leaked the curl handle on error).
    try:
        crl.perform()
    finally:
        crl.close()

    return b_obj.getvalue().decode('utf-8', 'ignore')
|
2020-01-22 08:51:02 +03:00
|
|
|
|
2020-01-21 23:26:49 +03:00
|
|
|
|
|
|
|
@app.route('/', methods=['GET'])
def index():
    """Render the landing page, honoring the user's dark-mode preference."""
    dark_enabled = user_config.get('dark', False)
    background = '#000' if dark_enabled else '#fff'
    return render_template('index.html', bg=background)
|
2020-01-21 23:26:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/search', methods=['GET'])
def search():
    """Proxy the user's query to Google and return the filtered result page.

    Reads the query from the ``q`` request arg, fetches Google's basic-HTML
    results with a spoofed user agent, then reskins/cooks the markup via
    the project's filter module before rendering.
    """
    q = request.args.get('q')
    if q is None or len(q) <= 0:
        # No query supplied — show the error page
        return render_template('error.html')

    full_query = filter.gen_query(q, request.args)
    # Default to '' — the header may be absent, and get_ua() would
    # otherwise crash on `'Android' in None`
    user_agent = request.headers.get('User-Agent', '')

    # Equivalent to `'dark' in user_config and user_config['dark']`,
    # without the duplicated lookup
    dark_mode = user_config.get('dark', False)
    nojs = user_config.get('nojs', False)

    get_body = filter.reskin(send_request(
        SEARCH_URL + full_query, get_ua(user_agent)), dark_mode=dark_mode)
    soup = filter.cook(BeautifulSoup(get_body, 'html.parser'), user_agent, nojs=nojs, dark_mode=dark_mode)

    return render_template('display.html', query=urlparse.unquote(q), response=soup)
|
2020-01-21 23:26:49 +03:00
|
|
|
|
|
|
|
|
2020-04-06 02:59:50 +03:00
|
|
|
@app.route('/config', methods=['POST'])
def config():
    """Persist a new user configuration posted as JSON.

    Writes the parsed body to static/config.json (pretty-printed) and
    swaps the in-memory ``user_config`` to match.
    """
    global user_config

    # Parse once and reuse — the original called json.loads(request.data)
    # twice; this also validates the payload before anything is written.
    new_config = json.loads(request.data)

    # The context manager closes the file; the explicit close() the
    # original performed inside the with-block was redundant.
    with open(STATIC_FOLDER + '/config.json', 'w') as config_file:
        config_file.write(json.dumps(new_config, indent=4))

    user_config = new_config

    return 'New config: ' + str(request.data)
|
|
|
|
|
|
|
|
|
2020-01-21 23:26:49 +03:00
|
|
|
@app.route('/url', methods=['GET'])
def url():
    """Redirect to the ``url`` arg, or to ``q`` when it looks like a link.

    Falls back to the error page when neither yields a usable target.
    """
    if 'url' in request.args:
        return redirect(request.args.get('url'))

    q = request.args.get('q')
    # q may be absent entirely — the original crashed on len(None);
    # truthiness covers both None and the empty string
    if q and 'http' in q:
        return redirect(q)

    return render_template('error.html', query=q)
|
2020-01-21 23:26:49 +03:00
|
|
|
|
|
|
|
|
2020-01-23 09:19:17 +03:00
|
|
|
@app.route('/imgres')
def imgres():
    """Follow Google's /imgres indirection straight to the image URL."""
    image_url = request.args.get('imgurl')
    return redirect(image_url)
|
|
|
|
|
|
|
|
|
2020-02-22 02:52:29 +03:00
|
|
|
@app.route('/window')
def window():
    """Fetch an arbitrary page server-side and render it with scripts removed.

    The target comes from the ``location`` request arg; root-relative asset
    links are rewritten to point back at that origin.
    """
    location = request.args.get('location')
    get_body = send_request(location, get_ua(request.headers.get('User-Agent')))

    # Rewrite root-relative links to absolute ones. The original appended
    # a stray '"' and dropped the '/', yielding broken markup like
    # src="<location>"path — fixed to produce src="<location>/path.
    get_body = get_body.replace('src="/', 'src="' + location + '/')
    get_body = get_body.replace('href="/', 'href="' + location + '/')

    soup = BeautifulSoup(get_body, 'html.parser')

    # Best-effort script stripping; keep rendering even if removal hiccups
    try:
        for script in soup('script'):
            script.decompose()
    except Exception:
        pass

    return render_template('display.html', response=soup)
|
|
|
|
|
|
|
|
|
2020-01-21 23:26:49 +03:00
|
|
|
if __name__ == '__main__':
    # Development entry point: debug=True enables the reloader and the
    # Werkzeug debugger — not suitable for production deployment as-is.
    app.run(debug=True)
|