libremiami-search/searx/webapp.py

175 lines
6.0 KiB
Python
Raw Normal View History

2013-10-14 23:09:13 +02:00
#!/usr/bin/env python
2013-10-15 00:33:18 +02:00
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
2013-10-14 23:09:13 +02:00
# When executed directly from the source tree (not installed), add the
# parent directory to sys.path so the "searx" package below resolves.
if __name__ == "__main__":
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__))+'/../'))
2013-10-21 00:28:48 +02:00
from flask import Flask, request, render_template, url_for, Response, make_response
2013-11-04 00:21:27 +01:00
from searx.engines import search, categories, engines, get_engines_stats
2013-10-19 16:18:41 +02:00
from searx import settings
2013-10-17 00:30:41 +02:00
import json
2013-11-15 18:55:18 +01:00
import cStringIO
from searx.utils import UnicodeWriter
2013-10-14 23:09:13 +02:00
app = Flask(__name__)
# Flask uses the secret key to sign cookies/session data; value comes
# from the searx settings module.
app.secret_key = settings.secret_key
2013-10-14 23:09:13 +02:00
2013-10-16 00:01:08 +02:00
opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
<ShortName>searx</ShortName>
<Description>Search searx</Description>
<InputEncoding>UTF-8</InputEncoding>
<LongName>searx meta search engine</LongName>
2013-10-20 22:37:55 +02:00
<Url type="text/html" method="{method}" template="{host}">
2013-10-16 00:01:08 +02:00
<Param name="q" value="{{searchTerms}}" />
</Url>
</OpenSearchDescription>
'''
2013-10-15 20:50:12 +02:00
def render(template_name, **kwargs):
    '''Render *template_name*, injecting the category list.

    Always sets kwargs['categories'] to the sorted category names.
    If the caller did not supply 'selected_categories', they are taken
    from the 'categories' cookie, falling back to ['general'].
    '''
    # `categories` is module-level; a `global` declaration is not needed
    # for read access.
    kwargs['categories'] = sorted(categories.keys())
    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)
        if not len(kwargs['selected_categories']):
            kwargs['selected_categories'] = ['general']
    return render_template(template_name, **kwargs)
2013-11-04 00:21:27 +01:00
def parse_query(query):
    '''Split an optional leading "-<engine>" selector off *query*.

    Returns a (query, query_engines) tuple: query_engines is a list of
    {'category', 'name'} dicts for an engine named with a leading "-" as
    the first word; that word is removed from the returned query.
    '''
    query_engines = []
    query_parts = query.split()
    # Guard: an empty or whitespace-only query has no first word
    # (the original code raised IndexError here).
    if not query_parts:
        return query, query_engines
    if query_parts[0].startswith('-') and query_parts[0][1:] in engines:
        query_engines.append({'category': 'TODO', 'name': query_parts[0][1:]})
        query = query.replace(query_parts[0], '', 1).strip()
    return query, query_engines
2013-10-14 23:09:13 +02:00
@app.route('/', methods=['GET', 'POST'])
def index():
    '''Main search endpoint.

    Without a "q" parameter renders the start page; with one, runs the
    search over the selected engines and returns HTML, JSON or CSV
    depending on the "format" parameter.
    '''
    global categories

    # Query parameters come from the form body on POST, the URL on GET.
    if request.method=='POST':
        request_data = request.form
    else:
        request_data = request.args
    if not request_data.get('q'):
        return render('index.html')

    selected_categories = []

    # Engine selection precedence: explicit "-engine" prefix in the query,
    # then checked "category_*" form fields, then the categories cookie,
    # then the 'general' default.
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))
    if not len(selected_engines):
        for pd_name,pd in request_data.items():
            # Form fields named "category_<name>" mark selected categories.
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if not category in categories:
                    continue
                selected_categories.append(category)
        if not len(selected_categories):
            cookie_categories = request.cookies.get('categories', '').split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    selected_categories.append(ccateg)
        if not len(selected_categories):
            selected_categories = ['general']
        # Expand the chosen categories into their member engines.
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name} for x in categories[categ])

    results, suggestions = search(query, request, selected_engines)

    # Shorten long URLs for display: keep both ends, elide the middle.
    for result in results:
        if len(result['url']) > 74:
            result['pretty_url'] = result['url'][:35] + '[..]' + result['url'][-35:]
        else:
            result['pretty_url'] = result['url']

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}), mimetype='application/json')
    elif request_data.get('format') == 'csv':
        # Header row is taken from the first result's keys; assumes all
        # results share the same keys — TODO confirm against engines.
        csv = UnicodeWriter(cStringIO.StringIO())
        if len(results):
            keys = results[0].keys()
            csv.writerow(keys)
            for row in results:
                csv.writerow([row[key] for key in keys])
            csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        # NOTE(review): query is interpolated unescaped into the header;
        # verify it cannot contain header-breaking characters.
        response.headers.add('Content-Disposition', 'attachment;Filename=searx_-_{0}.csv'.format(query))
        return response

    template = render('results.html'
                      ,results=results
                      ,q=request_data['q']
                      ,selected_categories=selected_categories
                      ,number_of_results=len(results)
                      ,suggestions=suggestions
                      )
    resp = make_response(template)
    # Persist the effective category selection for subsequent visits.
    resp.set_cookie('categories', ','.join(selected_categories))

    return resp
2013-10-14 23:09:13 +02:00
2013-10-16 00:01:08 +02:00
@app.route('/favicon.ico', methods=['GET'])
def fav():
    '''Answer favicon requests with an empty body.'''
    return ''
2013-10-21 00:28:48 +02:00
@app.route('/about', methods=['GET'])
def about():
    '''Render the about page with every category and its engines.'''
    return render('about.html', categs=categories.items())
2013-10-27 01:03:05 +02:00
@app.route('/stats', methods=['GET'])
def stats():
    '''Render per-engine usage statistics.'''
    # Local renamed from `stats` so it no longer shadows this view
    # function; the unused `global categories` declaration is dropped.
    engine_stats = get_engines_stats()
    return render('stats.html', stats=engine_stats)
2013-10-16 00:01:08 +02:00
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    '''Serve the OpenSearch description document.

    The HTTP method and the absolute search URL are filled into the
    opensearch_xml template per request.
    '''
    method = 'post'
    scheme = 'http'
    # chrome/chromium only supports HTTP GET....
    if 'webkit' in request.headers.get('User-Agent', '').lower():
        method = 'get'
    if request.is_secure:
        scheme = 'https'
    # `opensearch_xml` is only read here; no `global` statement needed.
    ret = opensearch_xml.format(method=method, host=url_for('index', _external=True, _scheme=scheme))
    resp = Response(response=ret,
                    status=200,
                    mimetype="application/xml")
    return resp
2013-10-14 23:09:13 +02:00
# Development entry point: gevent-patched Flask server.
if __name__ == "__main__":
    from gevent import monkey
    # NOTE(review): gevent recommends calling patch_all() as early as
    # possible, before other modules are imported; here the module-level
    # imports above have already run — confirm this ordering is intended.
    monkey.patch_all()
    app.run(debug = settings.debug
           ,use_debugger = settings.debug
           ,port = settings.port
           )