diff --git a/docs/admin/installation.rst b/docs/admin/installation.rst
index 239ce070..15800fc0 100644
--- a/docs/admin/installation.rst
+++ b/docs/admin/installation.rst
@@ -114,6 +114,9 @@ content:
     # Module to import
     module = searx.webapp
 
+    # Support running the module from a webserver subdirectory.
+    route-run = fixpathinfo:
+
     # Virtualenv and python path
     virtualenv = /usr/local/searx/searx-ve/
     pythonpath = /usr/local/searx/
@@ -151,7 +154,10 @@ content:
     server {
         listen 80;
         server_name searx.example.com;
-        root /usr/local/searx;
+        root /usr/local/searx/searx;
+
+        location /static {
+        }
 
         location / {
                 include uwsgi_params;
@@ -180,14 +186,13 @@ Add this configuration in the server config file
 
 .. code:: nginx
 
-   location = /searx { rewrite ^ /searx/; }
-   location /searx {
-           try_files $uri @searx;
+   location /searx/static {
+           alias /usr/local/searx/searx/static;
    }
-   location @searx {
+
+   location /searx {
            uwsgi_param SCRIPT_NAME /searx;
            include uwsgi_params;
-           uwsgi_modifier1 30;
            uwsgi_pass unix:/run/uwsgi/app/searx/socket;
    }
 
@@ -197,6 +202,10 @@ in case of single-user or low-traffic instances.)
 
 .. code:: nginx
 
+    location /searx/static {
+            alias /usr/local/searx/searx/static;
+    }
+
     location /searx {
             proxy_pass http://127.0.0.1:8888;
             proxy_set_header Host $host;
@@ -338,4 +347,3 @@ References
 
 * How to: `Setup searx in a couple of hours with a free SSL certificate
   `__
-
diff --git a/searx/engines/bing.py b/searx/engines/bing.py
index ed0b87db..b193f7c6 100644
--- a/searx/engines/bing.py
+++ b/searx/engines/bing.py
@@ -89,8 +89,7 @@ def response(resp):
                         'content': content})
 
     try:
-        result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()'))
-        result_len_container = utils.to_string(result_len_container)
+        result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
         if "-" in result_len_container:
             # Remove the part "from-to" for paginated request ...
             result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
@@ -102,7 +101,7 @@ def response(resp):
         logger.debug('result error :\n%s', e)
         pass
 
-    if _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
+    if result_len and _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
         return []
 
     results.append({'number_of_results': result_len})
diff --git a/searx/engines/flickr_noapi.py b/searx/engines/flickr_noapi.py
index 198ac2cf..e1abb378 100644
--- a/searx/engines/flickr_noapi.py
+++ b/searx/engines/flickr_noapi.py
@@ -109,14 +109,22 @@ def response(resp):
         else:
             url = build_flickr_url(photo['ownerNsid'], photo['id'])
 
-        results.append({'url': url,
-                        'title': title,
-                        'img_src': img_src,
-                        'thumbnail_src': thumbnail_src,
-                        'content': content,
-                        'author': author,
-                        'source': source,
-                        'img_format': img_format,
-                        'template': 'images.html'})
+        result = {
+            'url': url,
+            'img_src': img_src,
+            'thumbnail_src': thumbnail_src,
+            'source': source,
+            'img_format': img_format,
+            'template': 'images.html'
+        }
+        try:
+            result['author'] = author.encode('utf-8')
+            result['title'] = title.encode('utf-8')
+            result['content'] = content.encode('utf-8')
+        except:
+            result['author'] = ''
+            result['title'] = ''
+            result['content'] = ''
+        results.append(result)
 
     return results
diff --git a/searx/engines/ina.py b/searx/engines/ina.py
index 37a05f09..ea509649 100644
--- a/searx/engines/ina.py
+++ b/searx/engines/ina.py
@@ -32,7 +32,7 @@ base_url = 'https://www.ina.fr'
 search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
 
 # specific xpath variables
-results_xpath = '//div[contains(@class,"search-results--list")]/div[@class="media"]'
+results_xpath = '//div[contains(@class,"search-results--list")]//div[@class="media-body"]'
 url_xpath = './/a/@href'
 title_xpath = './/h3[@class="h3--title media-heading"]'
 thumbnail_xpath = './/img/@src'
@@ -65,8 +65,11 @@ def response(resp):
         videoid = result.xpath(url_xpath)[0]
         url = base_url + videoid
         title = p.unescape(extract_text(result.xpath(title_xpath)))
-        thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
-        if thumbnail[0] == '/':
+        try:
+            thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
+        except:
+            thumbnail = ''
+        if thumbnail and thumbnail[0] == '/':
             thumbnail = base_url + thumbnail
         d = extract_text(result.xpath(publishedDate_xpath)[0])
         d = d.split('/')
diff --git a/searx/engines/microsoft_academic.py b/searx/engines/microsoft_academic.py
index 9387b08d..9bac0069 100644
--- a/searx/engines/microsoft_academic.py
+++ b/searx/engines/microsoft_academic.py
@@ -45,6 +45,8 @@ def request(query, params):
 def response(resp):
     results = []
     response_data = loads(resp.text)
+    if not response_data:
+        return results
 
     for result in response_data['results']:
         url = _get_url(result)
diff --git a/searx/engines/scanr_structures.py b/searx/engines/scanr_structures.py
index 72fd2b3c..7208dcb7 100644
--- a/searx/engines/scanr_structures.py
+++ b/searx/engines/scanr_structures.py
@@ -29,7 +29,7 @@ def request(query, params):
     params['url'] = search_url
     params['method'] = 'POST'
     params['headers']['Content-type'] = "application/json"
-    params['data'] = dumps({"query": query,
+    params['data'] = dumps({"query": query.decode('utf-8'),
                             "searchField": "ALL",
                             "sortDirection": "ASC",
                             "sortOrder": "RELEVANCY",
diff --git a/searx/settings.yml b/searx/settings.yml
index 2a2d2bf8..2777f9ca 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -79,9 +79,10 @@ engines:
     categories : science
     timeout : 4.0
 
-  - name : base
-    engine : base
-    shortcut : bs
+# tmp suspended: dh key too small
+#  - name : base
+#    engine : base
+#    shortcut : bs
 
   - name : wikipedia
     engine : wikipedia
@@ -552,10 +553,11 @@ engines:
     timeout : 10.0
     disabled : True
 
-  - name : scanr structures
-    shortcut: scs
-    engine : scanr_structures
-    disabled : True
+# tmp suspended: bad certificate
+#  - name : scanr structures
+#    shortcut: scs
+#    engine : scanr_structures
+#    disabled : True
 
   - name : soundcloud
     engine : soundcloud
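
A minimal standalone sketch (not part of the diff) of why the added "result_len and" guard in searx/engines/bing.py matters: when the "sb_count" span is missing or unparsable, result_len stays 0, and without the truthiness check every page past the first would be discarded. The helper should_discard() is hypothetical and exists only for this illustration; _get_offset_from_pageno is assumed to follow the engine's 10-results-per-page arithmetic.

    # Sketch only; assumes 10 results per page, as in the bing engine.
    def _get_offset_from_pageno(pageno):
        return (pageno - 1) * 10 + 1

    def should_discard(result_len, pageno):
        # result_len == 0 means the result count could not be parsed;
        # in that case keep whatever results were extracted.
        return bool(result_len) and _get_offset_from_pageno(pageno) > result_len

    assert should_discard(0, 3) is False     # unknown total: keep the parsed results
    assert should_discard(25, 5) is True     # offset 41 beyond 25 known results: drop
    assert should_discard(100, 2) is False   # offset 11 within 100 results: keep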