#!/usr/bin/env python
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

import os
import sys

if __name__ == "__main__":
    sys.path.append(os.path.realpath(os.path.dirname(os.path.realpath(__file__))+'/../'))

# first argument is for specifying settings module, used mostly by robot tests
from sys import argv
if len(argv) == 2:
    from importlib import import_module
    settings = import_module('searx.' + argv[1])
else:
    from searx import settings

from flask import Flask, request, render_template, url_for, Response, make_response, redirect
from searx.engines import search, categories, engines, get_engines_stats
import json
import cStringIO
from searx.utils import UnicodeWriter
from flask import send_from_directory
from searx.utils import highlight_content, html_to_text


app = Flask(__name__)
app.secret_key = settings.secret_key
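# OpenSearch description served by the /opensearch.xml route below; {method}
# and {host} are filled in per request, {{searchTerms}} stays a literal
# placeholder for the client.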
opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
<ShortName>searx</ShortName>
<Description>Search searx</Description>
<InputEncoding>UTF-8</InputEncoding>
<LongName>searx meta search engine</LongName>
<Url type="text/html" method="{method}" template="{host}">
<Param name="q" value="{{searchTerms}}" />
</Url>
</OpenSearchDescription>
'''

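# Build the externally visible base URL: prefer the configured
# settings.base_url, otherwise derive scheme and host from the request.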
def get_base_url():
    if settings.base_url:
        hostname = settings.base_url
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname

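# Template rendering helper: always pass the sorted category list and restore
# selected_categories from the 'categories' cookie, defaulting to 'general'.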
def render(template_name, **kwargs):
    global categories
    kwargs['categories'] = sorted(categories.keys())
    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)
        if not len(kwargs['selected_categories']):
            kwargs['selected_categories'] = ['general']
    return render_template(template_name, **kwargs)

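# Pull a leading "-engine_name" token off the query and return the remaining
# query together with the explicitly selected engines.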
def parse_query(query):
    query_engines = []
    query_parts = query.split()
    # guard against empty/whitespace-only queries before indexing query_parts
    if query_parts and query_parts[0].startswith('-') and query_parts[0][1:] in engines:
        query_engines.append({'category': 'TODO', 'name': query_parts[0][1:]})
        query = query.replace(query_parts[0], '', 1).strip()
    return query, query_engines

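# Main search endpoint: GET and POST are accepted; the 'format' parameter
# selects html (default), json, csv or rss output.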
@app.route('/', methods=['GET', 'POST'])
def index():
    global categories

    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    if not request_data.get('q'):
        return render('index.html')

    selected_categories = []

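    # Let "-engine" tokens in the query pick engines directly; otherwise fall
    # back to categories from the form, then the cookie, then 'general'.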
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    if not len(selected_engines):
        for pd_name, pd in request_data.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)

        if not len(selected_categories):
            cookie_categories = request.cookies.get('categories', '').split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    selected_categories.append(ccateg)

        if not len(selected_categories):
            selected_categories = ['general']

        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name}
                                    for x in categories[categ])

    results, suggestions = search(query, request, selected_engines)

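    # Post-process each result: highlight query terms for HTML output, strip
    # markup for the other formats, and build a shortened pretty_url.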
    for result in results:
        if request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'], query)
            result['title'] = highlight_content(result['title'], query)
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            result['title'] = html_to_text(result['title']).strip()

        if len(result['url']) > 74:
            result['pretty_url'] = result['url'][:35] + '[..]' + result['url'][-35:]
        else:
            result['pretty_url'] = result['url']

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')

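    # CSV export: one row per result, columns in the fixed order given by keys.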
    elif request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if len(results):
            csv.writerow(keys)
            for row in results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='text/csv')
        response.headers.add('Content-Disposition',
                             'attachment; filename=searx_-_{0}.csv'.format('_'.join(query.split())))
        return response

    elif request_data.get('format') == 'rss':
        response_rss = render('opensearch_response_rss.xml',
                              results=results,
                              q=request_data['q'],
                              number_of_results=len(results),
                              base_url=get_base_url())
        return Response(response_rss, mimetype='text/xml')

    return render('results.html',
                  results=results,
                  q=request_data['q'],
                  selected_categories=selected_categories,
                  number_of_results=len(results),
                  suggestions=suggestions)

@app.route('/about', methods=['GET'])
def about():
    global categories
    return render('about.html', categs=categories.items())

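# Persist the categories picked on the preferences page in a cookie; render()
# and index() read it back on later requests.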
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    if request.method == 'POST':
        selected_categories = []
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
        if selected_categories:
            resp = make_response(redirect('/'))
            # cookie max age: 4 weeks
            resp.set_cookie('categories', ','.join(selected_categories),
                            max_age=60*60*24*7*4)
            return resp
    return render('preferences.html')

@app.route('/stats', methods=['GET'])
def stats():
    global categories
    stats = get_engines_stats()
    return render('stats.html', stats=stats)

@app.route('/robots.txt', methods=['GET'])
def robots():
return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
""", mimetype='text/plain')
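# Serve the OpenSearch description; browsers whose User-Agent contains
# 'webkit' get the GET form of the template, everything else gets POST.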
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    global opensearch_xml
    method = 'post'
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'
    base_url = get_base_url()
    ret = opensearch_xml.format(method=method, host=base_url)
    resp = Response(response=ret,
                    status=200,
                    mimetype="application/xml")
    return resp

@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path, 'static/img'),
                               'favicon.png', mimetype='image/vnd.microsoft.icon')

def run():
    from gevent import monkey
    # patch the stdlib so blocking I/O cooperates with gevent's event loop
    monkey.patch_all()

    app.run(debug=settings.debug,
            use_debugger=settings.debug,
            port=settings.port)

if __name__ == "__main__":
run()