Merge branch 'master' into fix/manage.sh

Markus Heiser 2020-04-29 12:55:13 +00:00 committed by GitHub
commit 4bae1a9eab
274 changed files with 41798 additions and 40564 deletions


@ -4,6 +4,9 @@
*/*/*/*~ */*/*/*~
*/*/*/*/*~ */*/*/*/*~
#
local/
# Git # Git
.git .git
.gitignore .gitignore
@ -36,6 +39,11 @@ robot_report.html
test_basic/ test_basic/
setup.cfg setup.cfg
# node_modules
node_modules/ node_modules/
*/node_modules/
*/*/node_modules/
*/*/*/node_modules/
*/*/*/*/node_modules/
.tx/ .tx/

.gitignore vendored

@ -15,9 +15,12 @@ setup.cfg
*/*.pyc */*.pyc
*~ *~
node_modules/ /node_modules
.tx/ .tx/
build/
dist/
local/ local/
gh-pages/
searx.egg-info/ searx.egg-info/


@ -1,26 +1,24 @@
os: linux
dist: bionic
language: python language: python
sudo: false
cache: cache:
- pip
- npm
- directories: - directories:
- $HOME/.cache/pip - $HOME/.cache/pip
addons: addons:
firefox: "latest" firefox: "latest"
install: install:
- ./manage.sh install_geckodriver ~/drivers - env
- export PATH=~/drivers:$PATH - which python; python --version
- ./manage.sh npm_packages - make V=1 install
- ./manage.sh update_dev_packages - make V=1 gecko.driver
- pip install codecov - make V=1 node.env
- make V=1 travis.codecov
script: script:
- ./manage.sh styles - make V=1 themes
- ./manage.sh grunt_build - make V=1 test
- ./manage.sh tests
after_success: after_success:
- ./manage.sh py_test_coverage - make V=1 test.coverage
- codecov - codecov
stages: stages:
@ -31,10 +29,13 @@ stages:
jobs: jobs:
include: include:
- python: "2.7" - python: "2.7"
env: PY=2
- python: "3.5" - python: "3.5"
- python: "3.6" - python: "3.6"
- python: "3.7"
- python: "3.8"
- stage: docker - stage: docker
python: "3.6" python: "3.8"
git: git:
depth: false depth: false
services: services:
@ -44,7 +45,7 @@ jobs:
install: true install: true
script: script:
- echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
- ./manage.sh docker_build push - make -e GIT_URL=$(git remote get-url origin) docker.push
after_success: true after_success: true
notifications: notifications:


@ -100,3 +100,27 @@ generally made searx better:
- @ZEROF - @ZEROF
- Ivan Skytte Jørgensen @isj-privacore - Ivan Skytte Jørgensen @isj-privacore
- @miicha - @miicha
- Étienne Deparis @milouse
- @pelag0s
- Denis Wernert @d-tux
- Robin Hallabro-Kokko @hallabro
- Jonas Zohren @jfowl
- Elias Ojala @theel0ja
- @brunob
- Nick Espig @nachtalb
- Rachmadani Haryono @rachmadaniHaryono
- Frank de Lange @yetangitu
- Émilien Devos @unifox
- Nicolas Gelot @nfk
- @volth
- Mathieu Brunot @madmath03
- @lorddavidiii
- @x250
- Robby O'Connor @robbyoconnor
- Finn @0xhtml
- @tmikaeld
- @hobbestigrou
- Vipul @finn0
- @CaffeinatedTech
- Robin Schneider @ypid
- @splintah


@ -1,3 +1,41 @@
0.16.0 2020.01.30
=================
- New engines
- Splash
- Apkmirror
- NPM search
- Invidious
- Seedpeer
- New languages
- Estonian
- Interlingua
- Lithuanian
- Tibetan
- Occitan
- Tamil
- Engine fixes (wolframalpha, google scholar, youtube, google images, seznam, google, soundcloud, google cloud, duden, btdigg, google play, bing images, flickr noapi, wikidata, dailymotion, google videos, arxiv, dictzone, fdroid, etymonline, bing, duckduckgo, startpage, voat, 1x, deviantart, digg, gigablast, mojeek, duckduckgo definitions, spotify, libgen, qwant, openstreetmap, wikipedia, ina, microsoft academic, scanr structures)
- Dependency updates
- Speed optimizations
- Initial support for offline engines
- Image format display
- Inline js scripts removed
- Infinite scroll plugin fix
- Simple theme bugfixes
- Docker image updates
- Bang expression fixes
- Result merging fixes
- New environment variable added: SEARX_BIND_ADDRESS
News
~~~~
- @return42 joined the maintainer team
- This is the last release with Python2 support
0.15.0 2019.01.06 0.15.0 2019.01.06
================= =================


@ -4,6 +4,7 @@ EXPOSE 8080
VOLUME /etc/searx VOLUME /etc/searx
VOLUME /var/log/uwsgi VOLUME /var/log/uwsgi
ARG GIT_URL=unknown
ARG VERSION_GITCOMMIT=unknown ARG VERSION_GITCOMMIT=unknown
ARG SEARX_GIT_VERSION=unknown ARG SEARX_GIT_VERSION=unknown
@ -66,7 +67,7 @@ RUN su searx -c "/usr/bin/python3 -m compileall -q searx"; \
# Keep this argument at the end since it change each time # Keep this argument at the end since it change each time
ARG LABEL_DATE= ARG LABEL_DATE=
LABEL maintainer="searx <https://github.com/asciimoo/searx>" \ LABEL maintainer="searx <${GIT_URL}>" \
description="A privacy-respecting, hackable metasearch engine." \ description="A privacy-respecting, hackable metasearch engine." \
version="${SEARX_GIT_VERSION}" \ version="${SEARX_GIT_VERSION}" \
org.label-schema.schema-version="1.0" \ org.label-schema.schema-version="1.0" \
@ -76,4 +77,11 @@ LABEL maintainer="searx <https://github.com/asciimoo/searx>" \
org.label-schema.vcs-ref=${LABEL_VCS_REF} \ org.label-schema.vcs-ref=${LABEL_VCS_REF} \
org.label-schema.vcs-url=${LABEL_VCS_URL} \ org.label-schema.vcs-url=${LABEL_VCS_URL} \
org.label-schema.build-date="${LABEL_DATE}" \ org.label-schema.build-date="${LABEL_DATE}" \
org.label-schema.usage="https://github.com/searx/searx-docker" org.label-schema.usage="https://github.com/searx/searx-docker" \
org.opencontainers.image.title="searx" \
org.opencontainers.image.version="${SEARX_GIT_VERSION}" \
org.opencontainers.image.url="${LABEL_VCS_URL}" \
org.opencontainers.image.revision=${LABEL_VCS_REF} \
org.opencontainers.image.source=${LABEL_VCS_URL} \
org.opencontainers.image.created="${LABEL_DATE}" \
org.opencontainers.image.documentation="https://github.com/searx/searx-docker"

Makefile

@ -1,36 +1,54 @@
# -*- coding: utf-8; mode: makefile-gmake -*- # -*- coding: utf-8; mode: makefile-gmake -*-
export GIT_URL=https://github.com/asciimoo/searx
export SEARX_URL=https://searx.me
export DOCS_URL=https://asciimoo.github.io/searx
PYOBJECTS = searx PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\] PY_SETUP_EXTRAS ?= \[test\]
PYDIST=./dist/py
PYBUILD=./build/py
include utils/makefile.include include utils/makefile.include
include utils/makefile.python include utils/makefile.python
include utils/makefile.sphinx
all: clean install all: clean install
PHONY += help PHONY += help
help: help:
@echo ' test - run developer tests' @echo ' test - run developer tests'
@echo ' docs - build documentation'
@echo ' docs-live - autobuild HTML documentation while editing'
@echo ' run - run developer instance' @echo ' run - run developer instance'
@echo ' install - developer install (./local)' @echo ' install - developer install (./local)'
@echo ' uninstall - uninstall (./local)' @echo ' uninstall - uninstall (./local)'
@echo ' gh-pages - build docs & deploy on gh-pages branch'
@echo ' clean - drop builds and environments'
@echo ' project - re-build generic files of the searx project'
@echo ' buildenv - re-build environment files (aka brand)'
@echo ' themes - re-build build the source of the themes'
@echo ' docker - build Docker image'
@echo ' node.env - download & install npm dependencies locally'
@echo '' @echo ''
@$(MAKE) -s -f utils/makefile.include make-help @$(MAKE) -s -f utils/makefile.include make-help
@echo '' @echo ''
@$(MAKE) -s -f utils/makefile.python python-help @$(MAKE) -s -f utils/makefile.python python-help
PHONY += install PHONY += install
install: pyenvinstall install: buildenv pyenvinstall
PHONY += uninstall PHONY += uninstall
uninstall: pyenvuninstall uninstall: pyenvuninstall
PHONY += clean PHONY += clean
clean: pyclean clean: pyclean node.clean test.clean
$(call cmd,common_clean) $(call cmd,common_clean)
PHONY += run PHONY += run
run: pyenvinstall run: buildenv pyenvinstall
$(Q) ( \ $(Q) ( \
sed -i -e "s/debug : False/debug : True/g" ./searx/settings.yml ; \ sed -i -e "s/debug : False/debug : True/g" ./searx/settings.yml ; \
sleep 2 ; \ sleep 2 ; \
@ -40,23 +58,178 @@ run: pyenvinstall
) & ) &
$(PY_ENV)/bin/python ./searx/webapp.py $(PY_ENV)/bin/python ./searx/webapp.py
# docs
# ----
PHONY += docs
docs: buildenv pyenvinstall sphinx-doc
$(call cmd,sphinx,html,docs,docs)
PHONY += docs-live
docs-live: buildenv pyenvinstall sphinx-live
$(call cmd,sphinx_autobuild,html,docs,docs)
$(GH_PAGES)::
@echo "doc available at --> $(DOCS_URL)"
# update project files
# --------------------
PHONY += project engines.languages useragents.update buildenv
project: buildenv useragents.update engines.languages
engines.languages: pyenvinstall
$(Q)echo "fetch languages .."
$(Q)$(PY_ENV_ACT); python utils/fetch_languages.py
$(Q)echo "update searx/data/engines_languages.json"
$(Q)mv engines_languages.json searx/data/engines_languages.json
$(Q)echo "update searx/languages.py"
$(Q)mv languages.py searx/languages.py
useragents.update: pyenvinstall
$(Q)echo "Update searx/data/useragents.json with the most recent versions of Firefox."
$(Q)$(PY_ENV_ACT); python utils/fetch_firefox_version.py
buildenv:
$(Q)echo "build searx/brand.py"
$(Q)echo "GIT_URL = '$(GIT_URL)'" > searx/brand.py
$(Q)echo "ISSUE_URL = 'https://github.com/asciimoo/searx/issues'" >> searx/brand.py
$(Q)echo "SEARX_URL = '$(SEARX_URL)'" >> searx/brand.py
$(Q)echo "DOCS_URL = '$(DOCS_URL)'" >> searx/brand.py
$(Q)echo "PUBLIC_INSTANCES = 'https://searx.space'" >> searx/brand.py
$(Q)echo "build utils/brand.env"
$(Q)echo "export GIT_URL='$(GIT_URL)'" > utils/brand.env
$(Q)echo "export ISSUE_URL='https://github.com/asciimoo/searx/issues'" >> utils/brand.env
$(Q)echo "export SEARX_URL='$(SEARX_URL)'" >> utils/brand.env
$(Q)echo "export DOCS_URL='$(DOCS_URL)'" >> utils/brand.env
$(Q)echo "export PUBLIC_INSTANCES='https://searx.space'" >> utils/brand.env
# node / npm
# ----------
node.env: buildenv
$(Q)./manage.sh npm_packages
node.clean:
$(Q)echo "CLEAN locally installed npm dependencies"
$(Q)rm -rf \
./node_modules \
./package-lock.json \
./searx/static/themes/oscar/package-lock.json \
./searx/static/themes/oscar/node_modules \
./searx/static/themes/simple/package-lock.json \
./searx/static/themes/simple/node_modules
# build themes
# ------------
PHONY += themes.bootstrap themes themes.oscar themes.simple themes.legacy themes.courgette themes.pixart
themes: buildenv themes.bootstrap themes.oscar themes.simple themes.legacy themes.courgette themes.pixart
quiet_cmd_lessc = LESSC $3
cmd_lessc = PATH="$$(npm bin):$$PATH" \
lessc --clean-css="--s1 --advanced --compatibility=ie9" "searx/static/$2" "searx/static/$3"
quiet_cmd_grunt = GRUNT $2
cmd_grunt = PATH="$$(npm bin):$$PATH" \
grunt --gruntfile "$2"
themes.oscar:
$(Q)echo '[!] build oscar theme'
$(call cmd,grunt,searx/static/themes/oscar/gruntfile.js)
themes.simple:
$(Q)echo '[!] build simple theme'
$(call cmd,grunt,searx/static/themes/simple/gruntfile.js)
themes.legacy:
$(Q)echo '[!] build legacy theme'
$(call cmd,lessc,themes/legacy/less/style-rtl.less,themes/legacy/css/style-rtl.css)
$(call cmd,lessc,themes/legacy/less/style.less,themes/legacy/css/style.css)
themes.courgette:
$(Q)echo '[!] build courgette theme'
$(call cmd,lessc,themes/courgette/less/style.less,themes/courgette/css/style.css)
$(call cmd,lessc,themes/courgette/less/style-rtl.less,themes/courgette/css/style-rtl.css)
themes.pixart:
$(Q)echo '[!] build pixart theme'
$(call cmd,lessc,themes/pix-art/less/style.less,themes/pix-art/css/style.css)
themes.bootstrap:
$(call cmd,lessc,less/bootstrap/bootstrap.less,css/bootstrap.min.css)
# docker
# ------
PHONY += docker
docker: buildenv
$(Q)./manage.sh docker_build
docker.push: buildenv
$(Q)./manage.sh docker_build push
# gecko
# -----
PHONY += gecko.driver
gecko.driver:
$(PY_ENV_ACT); ./manage.sh install_geckodriver
# test # test
# ---- # ----
PHONY += test test.pylint test.pep8 test.unit test.robot PHONY += test test.pylint test.pep8 test.unit test.coverage test.robot
test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot
ifeq ($(PY),2)
test.pylint:
@echo "LINT skip liniting py2"
else
# TODO: balance linting with pylint # TODO: balance linting with pylint
test: test.pep8 test.unit test.robot test.pylint: pyenvinstall
- make pylint $(call cmd,pylint,\
searx/preferences.py \
searx/testing.py \
)
endif
# ignored rules:
# E402 module level import not at top of file
# W503 line break before binary operator
test.pep8: pyenvinstall test.pep8: pyenvinstall
$(PY_ENV_ACT); ./manage.sh pep8_check @echo "TEST pep8"
$(Q)$(PY_ENV_ACT); pep8 --exclude=searx/static --max-line-length=120 --ignore "E402,W503" searx tests
test.unit: pyenvinstall test.unit: pyenvinstall
$(PY_ENV_ACT); ./manage.sh unit_tests @echo "TEST tests/unit"
$(Q)$(PY_ENV_ACT); python -m nose2 -s tests/unit
test.robot: pyenvinstall test.coverage: pyenvinstall
$(PY_ENV_ACT); ./manage.sh install_geckodriver @echo "TEST unit test coverage"
$(PY_ENV_ACT); ./manage.sh robot_tests $(Q)$(PY_ENV_ACT); \
python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit \
&& coverage report \
&& coverage html \
test.robot: pyenvinstall gecko.driver
@echo "TEST robot"
$(Q)$(PY_ENV_ACT); PYTHONPATH=. python searx/testing.py robot
test.clean:
@echo "CLEAN intermediate test stuff"
$(Q)rm -rf geckodriver.log .coverage coverage/
# travis
# ------
travis.codecov:
$(Q)$(PY_ENV_BIN)/python -m pip install codecov
.PHONY: $(PHONY) .PHONY: $(PHONY)


@ -7,7 +7,7 @@ engine <https://en.wikipedia.org/wiki/Metasearch_engine>`__.
Pronunciation: səːks Pronunciation: səːks
List of `running List of `running
instances <https://github.com/asciimoo/searx/wiki/Searx-instances>`__. instances <https://searx.space/>`__.
See the `documentation <https://asciimoo.github.io/searx>`__ and the `wiki <https://github.com/asciimoo/searx/wiki>`__ for more information. See the `documentation <https://asciimoo.github.io/searx>`__ and the `wiki <https://github.com/asciimoo/searx/wiki>`__ for more information.
@ -18,18 +18,18 @@ Installation
~~~~~~~~~~~~ ~~~~~~~~~~~~
With Docker With Docker
------ -----------
Go to the `searx-docker <https://github.com/searx/searx-docker>`__ project. Go to the `searx-docker <https://github.com/searx/searx-docker>`__ project.
Without Docker Without Docker
------ --------------
For all the details, follow this `step by step installation <https://asciimoo.github.io/searx/dev/install/installation.html>`__. For all of the details, follow this `step by step installation <https://asciimoo.github.io/searx/admin/installation.html>`__.
Note: the documentation needs to be updated. Note: the documentation needs to be updated.
If you are in hurry If you are in a hurry
------ ---------------------
- clone source: - clone the source:
``git clone https://github.com/asciimoo/searx.git && cd searx`` ``git clone https://github.com/asciimoo/searx.git && cd searx``
- install dependencies: ``./manage.sh update_packages`` - install dependencies: ``./manage.sh update_packages``
- edit your - edit your


@ -31,3 +31,7 @@ touch-logrotate = /run/uwsgi-logrotate
unique-cron = 15 0 -1 -1 -1 { touch /run/uwsgi-logrotate } unique-cron = 15 0 -1 -1 -1 { touch /run/uwsgi-logrotate }
log-backupname = /var/log/uwsgi/uwsgi.log.1 log-backupname = /var/log/uwsgi/uwsgi.log.1
logto = /var/log/uwsgi/uwsgi.log logto = /var/log/uwsgi/uwsgi.log
# No keep alive
# See https://github.com/searx/searx-docker/issues/24
add-header = Connection: close

docs/_themes/searx/static/searx.css vendored Normal file

@ -0,0 +1,130 @@
@import url("pocoo.css");
a, a.reference, a.footnote-reference {
color: #004b6b;
border-color: #004b6b;
}
a:hover {
color: #6d4100;
border-color: #6d4100;
}
p.version-warning {
background-color: #004b6b;
}
div.sidebar {
background-color: whitesmoke;
border-color: lightsteelblue;
border-radius: 3pt;
}
p.sidebar-title, .sidebar p {
margin: 6pt;
}
.sidebar li,
.hlist li {
list-style-type: disclosure-closed;
}
/* admonitions
*/
div.admonition, div.topic {
background-color: #fafafa;
margin: 8px 0px;
padding: 1em;
border-radius: 3pt 0 0 3pt;
border-top: none;
border-right: none;
border-bottom: none;
border-left: 5pt solid #ccc;
}
p.admonition-title:after {
content: none;
}
.admonition.hint { border-color: #416dc0b0; }
.admonition.note { border-color: #6c856cb0; }
.admonition.tip { border-color: #85c5c2b0; }
.admonition.attention { border-color: #ecec97b0; }
.admonition.caution { border-color: #a6c677b0; }
.admonition.danger { border-color: #d46262b0; }
.admonition.important { border-color: #dfa3a3b0; }
.admonition.error { border-color: red; }
.admonition.warning { border-color: darkred; }
.admonition.admonition-generic-admonition-title {
border-color: #416dc0b0;
}
/* admonitions with (rendered) reST markup examples (:class: rst-example)
*
* .. admonition:: title of the example
* :class: rst-example
* ....
*/
div.rst-example {
background-color: inherit;
margin: 0;
border-top: none;
border-right: 1px solid #ccc;
border-bottom: none;
border-left: none;
border-radius: none;
padding: 0;
}
div.rst-example > p.admonition-title {
font-family: Sans Serif;
font-style: italic;
font-size: 0.8em;
display: block;
border-bottom: 1px solid #ccc;
padding: 0.5em 1em;
text-align: right;
}
/* code block in figures
*/
div.highlight pre {
text-align: left;
}
/* Table theme
*/
thead, tfoot {
background-color: #fff;
}
th:hover, td:hover {
background-color: #ffc;
}
thead th, tfoot th, tfoot td, tbody th {
background-color: #fffaef;
}
tbody tr:nth-child(odd) {
background-color: #fff;
}
tbody tr:nth-child(even) {
background-color: #fafafa;
}
caption {
font-family: Sans Serif;
padding: 0.5em;
margin: 0.5em 0 0.5em 0;
caption-side: top;
text-align: left;
}

docs/_themes/searx/theme.conf vendored Normal file

@ -0,0 +1,6 @@
[theme]
inherit = pocoo
stylesheet = searx.css
[options]
touch_icon =

docs/admin/api.rst Normal file

@ -0,0 +1,96 @@
.. _adminapi:
==================
Administration API
==================
Get configuration data
======================
.. code:: http
GET /config HTTP/1.1
Sample response
---------------
.. code:: json
{
"autocomplete": "",
"categories": [
"map",
"it",
"images",
],
"default_locale": "",
"default_theme": "oscar",
"engines": [
{
"categories": [
"map"
],
"enabled": true,
"name": "openstreetmap",
"shortcut": "osm"
},
{
"categories": [
"it"
],
"enabled": true,
"name": "arch linux wiki",
"shortcut": "al"
},
{
"categories": [
"images"
],
"enabled": true,
"name": "google images",
"shortcut": "goi"
},
{
"categories": [
"it"
],
"enabled": false,
"name": "bitbucket",
"shortcut": "bb"
},
],
"instance_name": "searx",
"locales": {
"de": "Deutsch (German)",
"en": "English",
"eo": "Esperanto (Esperanto)",
},
"plugins": [
{
"enabled": true,
"name": "HTTPS rewrite"
},
{
"enabled": false,
"name": "Vim-like hotkeys"
}
],
"safe_search": 0
}
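The endpoint can also be queried from a script. Below is a minimal sketch using
the Python ``requests`` package; the instance URL ``http://localhost:8888`` is
only an example, adjust it to your deployment.

.. code:: python

import requests

# fetch the instance configuration from the /config endpoint
config = requests.get('http://localhost:8888/config').json()

# list the enabled engines with their shortcuts
for engine in config['engines']:
    if engine['enabled']:
        print(engine['name'], engine['shortcut'])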
Embed search bar
================
The search bar can be embedded into websites. Just paste the example into the
HTML of the site. URL of the searx instance and values are customizable.
.. code:: html
<form method="post" action="https://searx.me/">
<!-- search --> <input type="text" name="q" />
<!-- categories --> <input type="hidden" name="categories" value="general,social media" />
<!-- language --> <input type="hidden" name="lang" value="all" />
<!-- locale --> <input type="hidden" name="locale" value="en" />
<!-- date filter --> <input type="hidden" name="time_range" value="month" />
</form>
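The same parameters can be sent programmatically as well. A minimal sketch using
the Python ``requests`` package follows; the instance URL is only an example and
the response body is the regular HTML result page.

.. code:: python

import requests

SEARX_URL = 'https://searx.me/'  # example instance, replace with your own

params = {
    'q': 'privacy respecting search',      # search terms
    'categories': 'general,social media',  # categories to search in
    'lang': 'all',                         # language
    'locale': 'en',                        # interface locale
    'time_range': 'month',                 # date filter
}

# the embedded form uses POST, so do the same here
response = requests.post(SEARX_URL, data=params)
print(response.status_code)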


@ -0,0 +1,33 @@
digraph G {
node [style=filled, shape=box, fillcolor="#ffffcc", fontname="Sans"];
edge [fontname="Sans"];
browser [label="Browser", shape=Mdiamond];
rp [label="Reverse Proxy", href="url to configure reverse proxy"];
filtron [label="Filtron", href="https://github.com/asciimoo/filtron"];
morty [label="Morty", href="https://github.com/asciimoo/morty"];
static [label="Static files", href="url to configure static files"];
uwsgi [label="uwsgi", href="url to configure uwsgi"]
searx1 [label="Searx #1"];
searx2 [label="Searx #2"];
searx3 [label="Searx #3"];
searx4 [label="Searx #4"];
browser -> rp [label="HTTPS"]
subgraph cluster_searx {
label = "Searx instance" fontname="Sans";
bgcolor="#fafafa";
{ rank=same; static rp };
rp -> morty [label="optional: images and HTML pages proxy"];
rp -> static [label="optional: reverse proxy serves directly static files"];
rp -> filtron [label="HTTP"];
filtron -> uwsgi [label="HTTP"];
uwsgi -> searx1;
uwsgi -> searx2;
uwsgi -> searx3;
uwsgi -> searx4;
}
}


@ -0,0 +1,24 @@
.. _architecture:
============
Architecture
============
.. sidebar:: Needs work!
This article needs some work / Searx is a collaborative effort. If you have
any contribution, feel welcome to send us your :pull:`PR <../pulls>`, see
:ref:`how to contribute`.
Herein you will find some hints and suggestions about typical architectures of
searx infrastructures.
We start with a contribution from :pull:`@dalf <1776#issuecomment-567917320>`.
It shows a *reference* setup for public searx instances.
.. _arch public:
.. kernel-figure:: arch_public.dot
:alt: arch_public.dot
Reference architecture of a public searx setup.

docs/admin/buildhosts.rst Normal file

@ -0,0 +1,103 @@
.. _buildhosts:
==========
Buildhosts
==========
.. sidebar:: This article needs some work
If you have any contribution send us your :pull:`PR <../pulls>`, see
:ref:`how to contribute`.
To get the best results from the build, it is recommended to install additional
packages on the build hosts.
.. _docs build:
Build docs
==========
.. _Graphviz: https://graphviz.gitlab.io
.. _ImageMagick: https://www.imagemagick.org
.. _XeTeX: https://tug.org/xetex/
.. _dvisvgm: https://dvisvgm.de/
.. sidebar:: Sphinx build needs
- ImageMagick_
- Graphviz_
- XeTeX_
- dvisvgm_
Most of the sphinx requirements are installed from :origin:`setup.py` and the
docs can be built from scratch with ``make docs``. For better math and image
processing, additional packages are needed. XeTeX_ is needed not only for PDF
creation, it is also needed for :ref:`math` when HTML output is built.
To be able to do :ref:`sphinx:math-support` without CDNs, the math is rendered
as images (``sphinx.ext.imgmath`` extension). If your docs build (``make
docs``) shows warnings like this::
WARNING: dot(1) not found, for better output quality install \
graphviz from http://www.graphviz.org
..
WARNING: LaTeX command 'latex' cannot be run (needed for math \
display), check the imgmath_latex setting
you need to install additional packages on your build host, to get better HTML
output.
.. _system requirements:
.. tabs::
.. group-tab:: Ubuntu / debian
.. code-block:: sh
$ sudo apt install graphviz imagemagick texlive-xetex librsvg2-bin
.. group-tab:: Arch Linux
.. code-block:: sh
$ sudo pacman -S graphviz imagemagick texlive-bin extra/librsvg
.. group-tab:: Fedora / RHEL
.. code-block:: sh
$ sudo dnf install graphviz graphviz-gd texlive-xetex-bin librsvg2-tools
For PDF output you also need:
.. tabs::
.. group-tab:: Ubuntu / debian
.. code:: sh
$ sudo apt install texlive-latex-recommended texlive-extra-utils ttf-dejavu
.. group-tab:: Arch Linux
.. code:: sh
$ sudo pacman -S texlive-core texlive-latexextra ttf-dejavu
.. group-tab:: Fedora / RHEL
.. code:: sh
$ sudo dnf install \
texlive-collection-fontsrecommended texlive-collection-latex \
dejavu-sans-fonts dejavu-serif-fonts dejavu-sans-mono-fonts
.. _system requirements END:
.. literalinclude:: ../conf.py
:language: python
:start-after: # sphinx.ext.imgmath setup
:end-before: # sphinx.ext.imgmath setup END

docs/admin/engines.rst Normal file

@ -0,0 +1,71 @@
.. _engines generic:
=======
Engines
=======
.. sidebar:: Further reading ..
- :ref:`settings engine`
- :ref:`engine settings`
- :ref:`engine file`
============= =========== ==================== ============
:ref:`engine settings`    :ref:`engine file`
------------------------- ---------------------------------
Name (cfg)                Categories
------------------------- ---------------------------------
Engine        ..          Paging support       **P**
------------------------- -------------------- ------------
Shortcut      **S**       Language support     **L**
Timeout       **TO**      Time range support   **TR**
Disabled      **D**       Offline              **O**
------------- ----------- -------------------- ------------
Safe search   **SS**
------------- ----------- ---------------------------------
Weight        **W**
------------- ----------- ---------------------------------
Disabled      **D**
============= =========== =================================
Configuration defaults (at build time):
.. _configured engines:
.. jinja:: webapp
.. flat-table:: Engines configured at built time (defaults)
:header-rows: 1
:stub-columns: 2
* - Name (cfg)
- S
- Engine
- TO
- Categories
- P
- L
- SS
- D
- TR
- O
- W
- D
{% for name, mod in engines.items() %}
* - {{name}}
- !{{mod.shortcut}}
- {{mod.__name__}}
- {{mod.timeout}}
- {{", ".join(mod.categories)}}
- {{(mod.paging and "y") or ""}}
- {{(mod.language_support and "y") or ""}}
- {{(mod.safesearch and "y") or ""}}
- {{(mod.disabled and "y") or ""}}
- {{(mod.time_range_support and "y") or ""}}
- {{(mod.offline and "y") or ""}}
- {{mod.weight or 1 }}
- {{(mod.disabled and "y") or ""}}
{% endfor %}

docs/admin/filtron.rst Normal file

@ -0,0 +1,148 @@
==========================
How to protect an instance
==========================
Searx depends on external search services. To avoid the abuse of these services
it is advised to limit the number of requests processed by searx.
An application firewall, ``filtron`` solves exactly this problem. Information
on how to install it can be found at the `project page of filtron
<https://github.com/asciimoo/filtron>`__.
Sample configuration of filtron
===============================
An example configuration can be found below. This configuration limits the access
of:
- scripts or applications (roboagent limit)
- webcrawlers (botlimit)
- IPs which send too many requests (IP limit)
- too many json, csv, etc. requests (rss/json limit)
- the same User-Agent sending too many requests (useragent limit)
.. code:: json
[{
"name":"search request",
"filters":[
"Param:q",
"Path=^(/|/search)$"
],
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"subrules":[
{
"name":"roboagent limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"filters":[
"Header:User-Agent=(curl|cURL|Wget|python-requests|Scrapy|FeedFetcher|Go-http-client)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"botlimit",
"limit":0,
"stop":true,
"filters":[
"Header:User-Agent=(Googlebot|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot|MJ12bot|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"IP limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"stop":true,
"aggregations":[
"Header:X-Forwarded-For"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"rss/json limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"stop":true,
"filters":[
"Param:format=(csv|json|rss)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"useragent limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"aggregations":[
"Header:User-Agent"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
}
]
}]
Route request through filtron
=============================
Filtron can be started using the following command:
.. code:: sh
$ filtron -rules rules.json
It listens on ``127.0.0.1:4004`` and forwards filtered requests to
``127.0.0.1:8888`` by default.
Use it along with ``nginx`` with the following example configuration.
.. code:: nginx
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_pass http://127.0.0.1:4004/;
}
Requests come in on port 4004, pass through filtron and are then forwarded to
port 8888, where searx is running.

docs/admin/index.rst Normal file

@ -0,0 +1,16 @@
===========================
Administrator documentation
===========================
.. toctree::
:maxdepth: 1
installation
settings
api
architecture
filtron
morty
engines
plugins
buildhosts

docs/admin/installation.rst Normal file

@ -0,0 +1,349 @@
.. _installation:
============
Installation
============
.. contents::
:depth: 3
Basic installation
==================
Step by step installation for Debian/Ubuntu with virtualenv. For Ubuntu, be sure
to have the universe repository enabled.
Install packages:
.. code:: sh
$ sudo -H apt-get install \
git build-essential libxslt-dev \
python-dev python-virtualenv python-babel \
zlib1g-dev libffi-dev libssl-dev
Install searx:
.. code:: sh
cd /usr/local
sudo -H git clone https://github.com/asciimoo/searx.git
sudo -H useradd searx -d /usr/local/searx
sudo -H chown searx:searx -R /usr/local/searx
Install dependencies in a virtualenv:
.. code:: sh
cd /usr/local/searx
sudo -H -u searx -i
.. code:: sh
(searx)$ virtualenv searx-ve
(searx)$ . ./searx-ve/bin/activate
(searx)$ ./manage.sh update_packages
Configuration
==============
.. code:: sh
sed -i -e "s/ultrasecretkey/`openssl rand -hex 16`/g" searx/settings.yml
Edit searx/settings.yml if necessary.
Check
=====
Start searx:
.. code:: sh
python searx/webapp.py
Go to http://localhost:8888
If everything works fine, disable the debug option in settings.yml:
.. code:: sh
sed -i -e "s/debug : True/debug : False/g" searx/settings.yml
At this point searx is not daemonized; uwsgi allows this.
You can exit the virtualenv and the searx user bash (enter the exit command
twice).
uwsgi
=====
Install packages:
.. code:: sh
sudo -H apt-get install \
uwsgi uwsgi-plugin-python
Create the configuration file ``/etc/uwsgi/apps-available/searx.ini`` with this
content:
.. code:: ini
[uwsgi]
# Who will run the code
uid = searx
gid = searx
# disable logging for privacy
disable-logging = true
# Number of workers (usually CPU count)
workers = 4
# The right granted on the created socket
chmod-socket = 666
# Plugin to use and interpreter config
single-interpreter = true
master = true
plugin = python
lazy-apps = true
enable-threads = true
# Module to import
module = searx.webapp
# Support running the module from a webserver subdirectory.
route-run = fixpathinfo:
# Virtualenv and python path
virtualenv = /usr/local/searx/searx-ve/
pythonpath = /usr/local/searx/
chdir = /usr/local/searx/searx/
Activate the uwsgi application and restart:
.. code:: sh
cd /etc/uwsgi/apps-enabled
ln -s ../apps-available/searx.ini
/etc/init.d/uwsgi restart
Web server
==========
with nginx
----------
If nginx is not installed (uwsgi will not work with the package
nginx-light):
.. code:: sh
sudo -H apt-get install nginx
Hosted at /
~~~~~~~~~~~
Create the configuration file ``/etc/nginx/sites-available/searx`` with this
content:
.. code:: nginx
server {
listen 80;
server_name searx.example.com;
root /usr/local/searx/searx;
location /static {
}
location / {
include uwsgi_params;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
}
Create a symlink to sites-enabled:
.. code:: sh
sudo -H ln -s /etc/nginx/sites-available/searx /etc/nginx/sites-enabled/searx
Restart service:
.. code:: sh
sudo -H service nginx restart
sudo -H service uwsgi restart
from subdirectory URL (/searx)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Add this configuration in the server config file
``/etc/nginx/sites-enabled/default``:
.. code:: nginx
location /searx/static {
alias /usr/local/searx/searx/static;
}
location /searx {
uwsgi_param SCRIPT_NAME /searx;
include uwsgi_params;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
**OR** using a reverse proxy (please note that a reverse proxy is advised only
in case of single-user or low-traffic instances).
.. code:: nginx
location /searx/static {
alias /usr/local/searx/searx/static;
}
location /searx {
proxy_pass http://127.0.0.1:8888;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /searx;
proxy_buffering off;
}
Enable ``base_url`` in ``searx/settings.yml``
.. code:: yaml
base_url : http://your.domain.tld/searx/
Restart service:
.. code:: sh
sudo -H service nginx restart
sudo -H service uwsgi restart
disable logs
^^^^^^^^^^^^
for better privacy you can disable nginx logs about searx.
how to proceed: below ``uwsgi_pass`` in ``/etc/nginx/sites-available/default``
add:
.. code:: nginx
access_log /dev/null;
error_log /dev/null;
Restart service:
.. code:: sh
sudo -H service nginx restart
with apache
-----------
Add wsgi mod:
.. code:: sh
sudo -H apt-get install libapache2-mod-uwsgi
sudo -H a2enmod uwsgi
Add this configuration in the file ``/etc/apache2/apache2.conf``:
.. code:: apache
<Location />
Options FollowSymLinks Indexes
SetHandler uwsgi-handler
uWSGISocket /run/uwsgi/app/searx/socket
</Location>
Note that if your instance of searx is not at the root, you should replace
``<Location />`` with the location of your instance, like ``<Location /searx>``.
Restart Apache:
.. code:: sh
sudo -H /etc/init.d/apache2 restart
disable logs
~~~~~~~~~~~~
For better privacy you can disable Apache logs.
.. warning::
You can only disable logs for the whole (virtual) server not for a specific
path.
Go back to ``/etc/apache2/apache2.conf`` and above ``<Location />`` add:
.. code:: apache
CustomLog /dev/null combined
Restart Apache:
.. code:: sh
sudo -H /etc/init.d/apache2 restart
How to update
=============
.. code:: sh
cd /usr/local/searx
sudo -H -u searx -i
.. code:: sh
(searx)$ . ./searx-ve/bin/activate
(searx)$ git stash
(searx)$ git pull origin master
(searx)$ git stash apply
(searx)$ ./manage.sh update_packages
.. code:: sh
sudo -H service uwsgi restart
Docker
======
Make sure you have installed Docker. For instance, you can deploy searx like this:
.. code:: sh
docker pull wonderfall/searx
docker run -d --name searx -p $PORT:8888 wonderfall/searx
Go to ``http://localhost:$PORT``.
See https://hub.docker.com/r/wonderfall/searx/ for more information. It's also
possible to build searx from the embedded Dockerfile.
.. code:: sh
git clone https://github.com/asciimoo/searx.git
cd searx
docker build -t whatever/searx .
References
==========
* https://about.okhin.fr/posts/Searx/ with some additions
* How to: `Setup searx in a couple of hours with a free SSL certificate
<https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__

docs/admin/morty.rst Normal file

@ -0,0 +1,26 @@
=========================
How to setup result proxy
=========================
.. _morty: https://github.com/asciimoo/morty
.. _morty's README: https://github.com/asciimoo/morty
By default searx can only act as an image proxy for result images, but it is
possible to proxify all the result URLs with an external service, morty_.
To use this feature, morty has to be installed and activated in searx's
``settings.yml``.
Add the following snippet to your ``settings.yml`` and restart searx:
.. code:: yaml
result_proxy:
url : http://127.0.0.1:3000/
key : your_morty_proxy_key
``url``
Is the address of the running morty service.
``key``
Is an optional argument, see `morty's README`_ for more information.

docs/admin/plugins.rst Normal file

@ -0,0 +1,39 @@
.. _plugins generic:
===============
Plugins builtin
===============
.. sidebar:: Further reading ..
- :ref:`dev plugin`
Configuration defaults (at build time):
:DO: Default on
.. _configured plugins:
.. jinja:: webapp
.. flat-table:: Plugins configured at built time (defaults)
:header-rows: 1
:stub-columns: 1
:widths: 3 1 9
* - Name
- DO
- Description
JS & CSS dependencies
{% for plgin in plugins %}
* - {{plgin.name}}
- {{(plgin.default_on and "y") or ""}}
- {{plgin.description}}
{% for dep in (plgin.js_dependencies + plgin.css_dependencies) %}
| ``{{dep}}`` {% endfor %}
{% endfor %}

docs/admin/settings.rst Normal file

@ -0,0 +1,181 @@
.. _settings.yml:
================
``settings.yml``
================
.. sidebar:: Further reading ..
- :ref:`search API`
This page describes the configuration options of the settings.yml file.
.. _settings global:
Global Settings
===============
.. code:: yaml
server:
port : 8888
secret_key : "ultrasecretkey" # change this!
debug : False # debug mode, only for development
request_timeout : 2.0 # seconds
base_url : False # set custom base_url (or False)
themes_path : "" # custom ui themes path
default_theme : oscar # ui theme
useragent_suffix : "" # suffix of searx_useragent, could contain
# informations like admins email address
image_proxy : False # proxying image results through searx
default_locale : "" # default interface locale
# uncomment below section if you want to use a proxy
#outgoing_proxies :
# http : http://127.0.0.1:8080
# https: http://127.0.0.1:8080
# uncomment below section only if you have more than one network interface
# which can be the source of outgoing search requests
#source_ips:
# - 1.1.1.1
# - 1.1.1.2
locales:
en : English
de : Deutsch
he : Hebrew
hu : Magyar
fr : Français
es : Español
it : Italiano
nl : Nederlands
ja : 日本語 (Japanese)
tr : Türkçe
ru : Russian
ro : Romanian
``port`` :
Port number of the searx web application if you run it directly using ``python
searx/webapp.py``. Doesn't apply to searx running on Apache or Nginx.
``secret_key`` :
Used for cryptography purpose.
``debug`` :
Allow a more detailed log if you run searx directly. Display *detailed* error
messages in the browser too, so this must be deactivated in production.
``request_timeout`` :
Global timeout, in seconds, of the requests made to other engines. A bigger
timeout allows waiting for answers from slow engines, but in consequence
slows down searx's responsiveness (the result page may take up to the time
specified in the timeout to load).
``base_url`` :
The base URL where searx is deployed. Used to create correct inbound links.
``themes_path`` :
Path to where the themes are located. If you didn't develop anything, leave it
blank.
``default_theme`` :
Name of the theme you want to use by default on your searx instance.
``useragent_suffix`` :
Suffix to the user agent searx uses to send requests to other engines. If an
engine wishes to block you, a contact info here may be useful to avoid that.
``image_proxy`` :
Allow your instance of searx to proxy images. Uses memory space.
``default_locale`` :
Searx interface language. If blank, the locale is detected by using the
browser language. If it doesn't work, or you are deploying a language
specific instance of searx, a locale can be defined using an ISO language
code, like ``fr``, ``en``, ``de``.
.. _requests proxies: http://docs.python-requests.org/en/latest/user/advanced/#proxies
.. _PR SOCKS support: https://github.com/kennethreitz/requests/pull/478
``outgoing_proxies`` :
Define a proxy you wish to use, see `requests proxies`_. SOCKS proxies are
not supported, see `PR SOCKS support`_.
``source_ips`` :
If you use multiple network interfaces, define from which IP the requests must
be made.
``locales`` :
Locales codes and their names. Available translations of searx interface.
.. _settings engine:
Engine settings
===============
.. sidebar:: Further reading ..
- :ref:`engines-dev`
.. code:: yaml
- name : bing
engine : bing
shortcut : bi
base_url : 'https://{language}.wikipedia.org/'
categories : general
timeout : 3.0
api_key : 'apikey'
disabled : True
language : en_US
``name`` :
Name that will be used across searx to define this engine. In settings, on
the result page...
``engine`` :
Name of the python file used to handle requests and responses to and from this
search engine.
``shortcut`` :
Code used to execute bang requests (in this case using ``!bi`` or ``?bi``)
``base_url`` : optional
Part of the URL that should be stable across every request. Can be useful to
use multiple sites with only one engine, or to update the site URL without
touching the code.
``categories`` : optional
Define in which categories this engine will be active. Most of the time, it is
defined in the code of the engine, but in a few cases it is useful, like when
describing multiple search engines using the same code.
``timeout`` : optional
Timeout of the search with the current search engine. **Be careful, it will
modify the global timeout of searx.**
``api_key`` : optional
In a few cases, using an API needs the use of a secret key. How to obtain them
is described in the file.
``disabled`` : optional
Disable the engine by default, without deleting it. The user can still
manually activate it in the settings.
``language`` : optional
If you want to use another language for a specific engine, you can define it
by using the full ISO code of language and country, like ``fr_FR``, ``en_US``,
``de_DE``.
``weight`` : default ``1``
Weighting of the results of this engine.
.. note::
A few more options are possible, but they are pretty specific to some
engines, and so won't be described here.

docs/blog/admin.rst Normal file

@ -0,0 +1,43 @@
=============================================================
Searx admin interface
=============================================================
.. _searx-admin: https://github.com/kvch/searx-admin#searx-admin
.. _NLnet Foundation: https://nlnet.nl/
manage your instance from your browser
.. sidebar:: Installation
Installation guide can be found in the repository of searx-admin_.
One of the biggest advantages of searx is being extremely customizable. But at
first it can be daunting to newcomers. A barrier to taking advantage of this
feature is our ugly settings file, which is sometimes hard to understand and
edit.
To make self-hosting searx more accessible a new tool is introduced, called
``searx-admin``. It is a web application which is capable of managing your
instance and manipulating its settings via a web UI. It aims to replace editing
of ``settings.yml`` for less experienced administrators or people who prefer
graphical admin interfaces.
.. figure:: searx-admin-engines.png
:alt: Screenshot of engine list
Configuration page of engines
Since ``searx-admin`` acts as a supervisor for searx, we have decided to
implement it as a standalone tool instead of part of searx. Another reason for
making it a standalone tool is that the codebase and dependencies of searx
should not grow because of a fully optional feature, which does not affect
existing instances.
Acknowledgements
================
This development was sponsored by `NLnet Foundation`_.
| Happy hacking.
| kvch // 2017.08.22 21:25

docs/blog/index.rst Normal file

@ -0,0 +1,11 @@
====
Blog
====
.. toctree::
:maxdepth: 1
python3
admin
intro-offline
private-engines


@ -0,0 +1,77 @@
===============================
Preparation for offline engines
===============================
Offline engines
===============
To extend the functionality of searx, offline engines are going to be
introduced. An offline engine is an engine which does not need Internet
connection to perform a search and does not use HTTP to communicate.
Offline engines can be configured as online engines, by adding those to the
`engines` list of :origin:`settings.yml <searx/settings.yml>`. Thus, searx
finds the engine file and imports it.
Example skeleton for the new engines:
.. code:: python
from subprocess import PIPE, Popen
categories = ['general']
offline = True
def init(settings):
pass
def search(query, params):
process = Popen(['ls', query], stdout=PIPE)
return_code = process.wait()
if return_code != 0:
raise RuntimeError('non-zero return code', return_code)
results = []
line = process.stdout.readline()
while line:
result = parse_line(line)
results.append(result)
line = process.stdout.readline()
return results
Development progress
====================
First, a proposal has been created as a Github issue. Then it was moved to the
wiki as a design document. You can read it here: :wiki:`Offline-engines`.
In this development step, searx core was prepared to accept and perform offline
searches. Offline search requests are scheduled together with regular offline
requests.
As offline searches can return arbitrary results depending on the engine, the
current result templates were insufficient to present such results. Thus, a new
template is introduced which is capable of presenting arbitrary key value pairs
as a table. For more details, check out the pull request
:pull:`1700`.
Next steps
==========
Today, it is possible to create and run an offline engine. However, it is accessible to everyone who knows the searx instance. So the next step is to introduce token based access for engines. This way administrators are able to limit access to private engines.
Acknowledgement
===============
This development was sponsored by `Search and Discovery Fund`_ of `NLnet Foundation`_ .
.. _Search and Discovery Fund: https://nlnet.nl/discovery
.. _NLnet Foundation: https://nlnet.nl/
| Happy hacking.
| kvch // 2019.10.21 17:03


@ -0,0 +1,63 @@
==================================
Limit access to your searx engines
==================================
Administrators might find themselves wanting to limit access to some of the
enabled engines on their instances. It might be because they do not want to
expose some private information through an offline engine. Or they
would rather share engines only with their trusted friends or colleagues.
Private engines
===============
To solve this issue private engines were introduced in :pull:`1823`.
A new option was added to engines named `tokens`. It expects a list
of strings. If the user making a request presents one of the tokens
of an engine, he/she is able to access information about the engine
and make search requests.
Example configuration to restrict access to the Arch Linux Wiki engine:
.. code:: yaml
- name : arch linux wiki
engine : archlinux
shortcut : al
tokens : [ 'my-secret-token' ]
Unless a user has configured the right token, the engine is going
to be hidden from him/her. It is not going to be included in the
list of engines on the Preferences page and in the output of
`/config` REST API call.
Tokens can be added to one's configuration on the Preferences page
under "Engine tokens". The input expects a comma separated list of
strings.
The distribution of the tokens from the administrator to the users
is not carved in stone. As providing access to such engines
implies that the admin knows and trusts the user, we do not see it
necessary to come up with a strict process. Instead,
we would like to add guidelines to the documentation of the feature.
Next steps
==========
Now that searx has support for both offline engines and private engines,
it is possible to add concrete engines which benefit from these features.
For example engines which search on the local host running the instance.
Be it searching your file system or querying a private database. Be creative
and come up with new solutions which fit your use case.
Acknowledgement
===============
This development was sponsored by `Search and Discovery Fund`_ of `NLnet Foundation`_ .
.. _Search and Discovery Fund: https://nlnet.nl/discovery
.. _NLnet Foundation: https://nlnet.nl/
| Happy hacking.
| kvch // 2020.02.28 22:26

docs/blog/python3.rst Normal file

@ -0,0 +1,68 @@
============================
Introducing Python 3 support
============================
.. _Python 2.7 clock: https://pythonclock.org/
.. sidebar:: Python 2.7 to 3 upgrade
This chapter exists for historical reasons. The Python 2.7 release schedule
ends (`Python 2.7 clock`_) 11 years after Python 3 was introduced.
Most operating systems come with Python 3 installed by default, so it is time
for searx to support Python 3. But don't worry, support for Python 2.7 won't be
dropped.
.. image:: searxpy3.png
:scale: 50 %
:alt: hurray
:align: center
How to run searx using Python 3
===============================
Please make sure that you run at least Python 3.5.
To run searx, first a Python3 virtualenv should be created. After entering the
virtualenv, dependencies must be installed. Then run searx with python3 instead
of the usual python command.
.. code:: sh
virtualenv -p python3 venv3
source venv3/bin/activate
pip3 install -r requirements.txt
python3 searx/webapp.py
If you want to run searx using Python2.7, you don't have to do anything
differently as before.
Fun facts
=========
- 115 files were changed when implementing the support for both Python versions.
- All of the dependencies were compatible except for robotframework, which was
used for browser tests. Thus, these tests were migrated to splinter. From now on
both versions are tested on Travis and can be tested locally.
If you found bugs
=================
Please open an issue on `GitHub`_. Make sure that you mention your Python
version in your issue, so we can investigate it properly.
.. _GitHub: https://github.com/asciimoo/searx/issues
Acknowledgment
==============
This development was sponsored by `NLnet Foundation`_.
.. _NLnet Foundation: https://nlnet.nl/
| Happy hacking.
| kvch // 2017.05.13 22:57

(binary image added, 50 KiB)

docs/blog/searxpy3.png Normal file
(binary image added, 30 KiB)

docs/conf.py Normal file

@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
import sys, os
from searx.version import VERSION_STRING
from pallets_sphinx_themes import ProjectLink
from searx.brand import GIT_URL
from searx.brand import SEARX_URL
from searx.brand import DOCS_URL
# Project --------------------------------------------------------------
project = u'searx'
copyright = u'2015-2020, Adam Tauber, Noémi Ványi'
author = u'Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
highlight_language = 'none'
# General --------------------------------------------------------------
master_doc = "index"
source_suffix = '.rst'
numfig = True
from searx import webapp
jinja_contexts = {
'webapp': dict(**webapp.__dict__)
}
# usage:: lorem :patch:`f373169` ipsum
extlinks = {}
# upstream links
extlinks['wiki'] = ('https://github.com/asciimoo/searx/wiki/%s', ' ')
extlinks['pull'] = ('https://github.com/asciimoo/searx/pull/%s', 'PR ')
# links to custom brand
extlinks['origin'] = (GIT_URL + '/blob/master/%s', 'git://')
extlinks['patch'] = (GIT_URL + '/commit/%s', '#')
extlinks['search'] = (SEARX_URL + '/%s', '#')
extlinks['docs'] = (DOCS_URL + '/%s', 'docs: ')
extlinks['pypi'] = ('https://pypi.org/project/%s', 'PyPi: ')
extlinks['man'] = ('https://manpages.debian.org/jump?q=%s', '')
#extlinks['role'] = (
# 'https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-%s', '')
extlinks['duref'] = (
'http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#%s', '')
extlinks['durole'] = (
'http://docutils.sourceforge.net/docs/ref/rst/roles.html#%s', '')
extlinks['dudir'] = (
'http://docutils.sourceforge.net/docs/ref/rst/directives.html#%s', '')
extlinks['ctan'] = (
'https://ctan.org/pkg/%s', 'CTAN: ')
extensions = [
'sphinx.ext.imgmath',
'sphinx.ext.extlinks',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"pallets_sphinx_themes",
"sphinx_issues", # https://github.com/sloria/sphinx-issues/blob/master/README.rst
"sphinxcontrib.jinja", # https://github.com/tardyp/sphinx-jinja
'linuxdoc.rstFlatTable', # Implementation of the 'flat-table' reST-directive.
'linuxdoc.kfigure', # Sphinx extension which implements scalable image handling.
"sphinx_tabs.tabs", # https://github.com/djungelorm/sphinx-tabs
]
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"flask": ("https://flask.palletsprojects.com/", None),
# "werkzeug": ("https://werkzeug.palletsprojects.com/", None),
"jinja": ("https://jinja.palletsprojects.com/", None),
"linuxdoc" : ("https://return42.github.io/linuxdoc/", None),
"sphinx" : ("https://www.sphinx-doc.org/en/master/", None),
}
issues_github_path = "asciimoo/searx"
# HTML -----------------------------------------------------------------
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = "searx"
# sphinx.ext.imgmath setup
html_math_renderer = 'imgmath'
imgmath_image_format = 'svg'
imgmath_font_size = 14
# sphinx.ext.imgmath setup END
html_theme_options = {"index_sidebar_logo": True}
html_context = {
"project_links": [
ProjectLink("Source", GIT_URL),
ProjectLink("Wiki", "https://github.com/asciimoo/searx/wiki"),
ProjectLink("Public instances", "https://searx.space/"),
ProjectLink("Twitter", "https://twitter.com/Searx_engine"),
]
}
html_sidebars = {
"**": ["project.html", "relations.html", "searchbox.html"],
}
singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
html_static_path = ["static"]
html_logo = "static/img/searx_logo_small.png"
html_title = "Searx Documentation ({})".format("Searx-{}.tex".format(VERSION_STRING))
html_show_sourcelink = False
# LaTeX ----------------------------------------------------------------
latex_documents = [
(master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
]


@ -0,0 +1,180 @@
.. _how to contribute:
=================
How to contribute
=================
Prime directives: Privacy, Hackability
======================================
Searx has two prime directives, **privacy-by-design and hackability**. The
hackability comes in three levels:
- support of search engines
- plugins to alter search behaviour
- hacking searx itself
Note the lack of "world domination" among the directives. Searx has no
intention of wide mass-adoption, rounded corners, etc. The prime directive
"privacy" deserves a separate chapter, as it's quite uncommon unfortunately.
Privacy-by-design
-----------------
Searx was born out of the need for a **privacy-respecting** search tool which
can be extended easily to maximize both, its search and its privacy protecting
capabilities.
A few widely used features work differently, are turned off by default or are
not implemented at all **as a consequence of privacy-by-design**.
If a feature reduces the privacy preserving aspects of searx, it should be
switched off by default or should not be implemented at all. There are plenty of
search engines already providing such features. If a feature reduces the
protection of searx, users must be informed about the effect of choosing to
enable it. Features that protect privacy but differ from the expectations of
the user should also be explained.
Also, if you think that something works weirdly with searx, it might be because
the tool you use is designed in a way that interferes with privacy.
Submitting a bug report to the vendor of the misbehaving tool might be good
feedback for it to reconsider the disrespect to its customers (e.g. ``GET`` vs ``POST``
requests in various browsers).
Remember, the other prime directive of searx is to be hackable, so if the above
privacy concerns do not suit you, simply fork it.
*Happy hacking.*
Code
====
.. _PEP8: https://www.python.org/dev/peps/pep-0008/
.. _Conventional Commits: https://www.conventionalcommits.org/
.. _Git Commit Good Practice: https://wiki.openstack.org/wiki/GitCommitMessages
.. _Structural split of changes:
https://wiki.openstack.org/wiki/GitCommitMessages#Structural_split_of_changes
.. _gitmoji: https://gitmoji.carloscuesta.me/
.. _Semantic PR: https://github.com/zeke/semantic-pull-requests
.. sidebar:: Create good commits!
- `Structural split of changes`_
- `Conventional Commits`_
- `Git Commit Good Practice`_
- some like to use: gitmoji_
- not yet active: `Semantic PR`_
In order to submit a patch, please follow the steps below:
- Follow coding conventions.
- PEP8_ standards apply, except the convention of line length
- Maximum line length is 120 characters
- The cardinal rule for creating good commits is to ensure there is only one
*logical change* per commit / read `Structural split of changes`_
- Check if your code breaks existing tests. If so, update the tests or fix your
code.
- If your code can be unit-tested, add unit tests.
- Add yourself to the :origin:`AUTHORS.rst` file.
- Choose meaningful commit messages, read `Conventional Commits`_
.. code::
<type>[optional scope]: <description>
[optional body]
[optional footer(s)]
- Create a pull request.
For more help on getting started with searx development, see :ref:`devquickstart`.
Translation
===========
Translation currently takes place on :ref:`transifex <translation>`.
.. caution::
Please, do not update translation files in the repo.
.. _contrib docs:
Documentation
=============
.. _Sphinx: http://www.sphinx-doc.org
.. _reST: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
.. sidebar:: The reST sources
has been moved from ``gh-branch`` into ``master`` (:origin:`docs`).
The documentation is built using Sphinx_, so in order to generate the required
files you have to install it on your system. Much easier: use our
:ref:`makefile`.
Here is an example which makes a complete rebuild:
.. code:: sh
$ make docs-clean docs
...
The HTML pages are in dist/docs.
.. _make docs-live:
live build
----------
.. sidebar:: docs-clean
It is recommended to assert a complete rebuild before deploying (use
``docs-clean``).
Live build is like WYSIWYG; if you want to edit the documentation, it is the
recommended way to work. The Makefile target ``docs-live`` builds the docs,
opens the URL in your favorite browser and rebuilds every time a reST file has
been changed.
.. code:: sh
$ make docs-live
...
The HTML pages are in dist/docs.
... Serving on http://0.0.0.0:8080
... Start watching changes
.. _deploy on github.io:
deploy on github.io
-------------------
To deploy the documentation at :docs:`github.io <.>` use the Makefile target
:ref:`make gh-pages`, which builds the documentation, clones searx into a sub
folder ``gh-pages``, cleans it, copies the doc build into it and runs all the
needed git add, commit and push steps:
.. code:: sh
$ make docs-clean gh-pages
...
SPHINX docs --> file://<...>/dist/docs
The HTML pages are in dist/docs.
...
Cloning into 'gh-pages' ...
...
cd gh-pages; git checkout gh-pages >/dev/null
Switched to a new branch 'gh-pages'
...
doc available at --> https://asciimoo.github.io/searx

6
docs/dev/csv_table.txt Normal file
View File

@ -0,0 +1,6 @@
stub col row 1, column, "loremLorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam
voluptua."
stub col row 1, "At vero eos et accusam et justo duo dolores et ea rebum. Stet clita
kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.", column
stub col row 1, column, column

View File

@ -0,0 +1,267 @@
.. _engines-dev:
===============
Engine overview
===============
.. _metasearch-engine: https://en.wikipedia.org/wiki/Metasearch_engine
searx is a metasearch-engine_, so it uses different search engines to provide
better results.
Because there is no general search API which could be used for every search
engine, an adapter has to be built between searx and the external search
engines. Adapters are stored under the folder :origin:`searx/engines`.
.. contents::
:depth: 3
:backlinks: entry
general engine configuration
============================
It is required to tell searx the type of results the engine provides. The
arguments can be set in the engine file or in the settings file
(normally ``settings.yml``). The arguments in the settings file override
the ones in the engine file.
It does not matter if an option is stored in the engine file or in the
settings. However, the standard way is the following:
.. _engine file:
engine file
-----------
======================= =========== ===========================================
argument type information
======================= =========== ===========================================
categories list pages, in which the engine is working
paging boolean support multiple pages
language_support boolean support language choosing
time_range_support boolean support search time range
offline boolean engine runs offline
======================= =========== ===========================================
.. _engine settings:
settings.yml
------------
======================= =========== ===========================================
argument type information
======================= =========== ===========================================
name string name of search-engine
engine string name of searx-engine
(filename without ``.py``)
shortcut string shortcut of search-engine
timeout string specific timeout for search-engine
======================= =========== ===========================================
overrides
---------
A few of the options have default values in the engine, but are often
overwritten by the settings. If ``None`` is assigned to an option in the engine
file, it has to be redefined in the settings, otherwise searx will not start
with that engine.
The naming of overrides is arbitrary. But the recommended overrides are the
following:
======================= =========== ===========================================
argument type information
======================= =========== ===========================================
base_url string base-url, can be overwritten to use same
engine on other URL
number_of_results int maximum number of results per request
language string ISO code of language and country like en_US
api_key string api-key if required by engine
======================= =========== ===========================================
example code
------------
.. code:: python
# engine dependent config
categories = ['general']
paging = True
language_support = True
making a request
================
To perform a search, a URL has to be specified. In addition to specifying a
URL, arguments can be passed to the query.
passed arguments
----------------
These arguments can be used to construct the search query. Furthermore,
parameters with a default value can be redefined for special purposes.
====================== ============ ========================================================================
argument type default-value, information
====================== ============ ========================================================================
url string ``''``
method string ``'GET'``
headers set ``{}``
data set ``{}``
cookies set ``{}``
verify boolean ``True``
headers.User-Agent string a random User-Agent
category string current category, like ``'general'``
started datetime current date-time
pageno int current pagenumber
language string specific language code like ``'en_US'``, or ``'all'`` if unspecified
====================== ============ ========================================================================
parsed arguments
----------------
The function ``def request(query, params):`` always returns the ``params``
variable. Inside searx, the following parameters can be used to specify a search
request:
============ =========== =========================================================
argument type information
============ =========== =========================================================
url string requested url
method string HTTP request method
headers set HTTP header information
data set HTTP data information (parsed if ``method != 'GET'``)
cookies set HTTP cookies
verify boolean Performing SSL-Validity check
============ =========== =========================================================
example code
------------
.. code:: python
# search-url
base_url = 'https://example.com/'
search_string = 'search?{query}&page={page}'
# do search-request
def request(query, params):
search_path = search_string.format(
query=urlencode({'q': query}),
page=params['pageno'])
params['url'] = base_url + search_path
return params
returned results
================
Searx is able to return results of different media-types. Currently the
following media-types are supported:
- default_
- images_
- videos_
- torrent_
- map_
To set another media-type as default, the parameter ``template`` must be set to
the desired type.
default
-------
========================= =====================================================
result-parameter information
========================= =====================================================
url string, url of the result
title string, title of the result
content string, general result-text
publishedDate :py:class:`datetime.datetime`, time of publish
========================= =====================================================
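For orientation, below is a minimal, illustrative sketch of a ``response()``
function returning results of the default media-type. The XPath expressions and
markup are placeholders, not taken from a real engine:

.. code:: python

   from lxml import html
   from searx.engines.xpath import extract_text

   # get response from search-request
   def response(resp):
       results = []
       dom = html.fromstring(resp.text)

       # placeholder selector -- adapt to the markup the engine actually returns
       for item in dom.xpath('//div[@class="result"]'):
           results.append({
               'url': item.xpath('.//a/@href')[0],
               'title': extract_text(item.xpath('.//a')),
               'content': extract_text(item.xpath('.//p')),
           })

       return results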
images
------
To use this template, the following parameters are used:
========================= =====================================================
result-parameter information
========================= =====================================================
template is set to ``images.html``
url string, url to the result site
title string, title of the result *(partly implemented)*
content *(partly implemented)*
publishedDate :py:class:`datetime.datetime`,
time of publish *(partly implemented)*
img\_src string, url to the result image
thumbnail\_src string, url to a small-preview image
========================= =====================================================
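A short, illustrative sketch of appending an image result inside ``response()``;
all values are placeholders:

.. code:: python

   results.append({
       'template': 'images.html',
       'url': 'https://example.org/page-with-the-image',       # placeholder values
       'title': 'Example image',
       'content': '',
       'img_src': 'https://example.org/image.jpg',              # full size image
       'thumbnail_src': 'https://example.org/image-thumb.jpg',  # small preview
   })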
videos
------
========================= =====================================================
result-parameter information
========================= =====================================================
template is set to ``videos.html``
url string, url of the result
title string, title of the result
content *(not implemented yet)*
publishedDate :py:class:`datetime.datetime`, time of publish
thumbnail string, url to a small-preview image
========================= =====================================================
torrent
-------
.. _magnetlink: https://en.wikipedia.org/wiki/Magnet_URI_scheme
========================= =====================================================
result-parameter information
========================= =====================================================
template is set to ``torrent.html``
url string, url of the result
title string, title of the result
content string, general result-text
publishedDate :py:class:`datetime.datetime`,
time of publish *(not implemented yet)*
seed int, number of seeder
leech int, number of leecher
filesize int, size of file in bytes
files int, number of files
magnetlink string, magnetlink_ of the result
torrentfile string, torrentfile of the result
========================= =====================================================
map
---
========================= =====================================================
result-parameter information
========================= =====================================================
url string, url of the result
title string, title of the result
content string, general result-text
publishedDate :py:class:`datetime.datetime`, time of publish
latitude latitude of result (in decimal format)
longitude longitude of result (in decimal format)
boundingbox boundingbox of result (array of 4 values
``[lat-min, lat-max, lon-min, lon-max]``)
geojson geojson of result (http://geojson.org)
osm.type type of osm-object (if OSM-Result)
osm.id id of osm-object (if OSM-Result)
address.name name of object
address.road street name of object
address.house_number house number of object
address.locality city, place of object
address.postcode postcode of object
address.country country of object
========================= =====================================================
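Because the map template carries the most fields, a sketch of an appended map
result may help; all values are illustrative placeholders, and ``template`` is
set as described above for non-default media-types:

.. code:: python

   results.append({
       'template': 'map.html',
       'url': 'https://www.openstreetmap.org/relation/12345',   # placeholder values
       'title': 'Example place',
       'content': '',
       'latitude': 52.52,                                        # decimal degrees
       'longitude': 13.40,
       'boundingbox': [52.33, 52.68, 13.08, 13.77],              # [lat-min, lat-max, lon-min, lon-max]
       'geojson': {'type': 'Point', 'coordinates': [13.40, 52.52]},
       'osm': {'type': 'relation', 'id': 12345},
       'address': {'name': 'Example place',
                   'road': 'Example street',
                   'house_number': '1',
                   'locality': 'Berlin',
                   'postcode': '10117',
                   'country': 'Germany'},
   })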

3
docs/dev/hello.dot Normal file
View File

@ -0,0 +1,3 @@
graph G {
Hello -- World
}

15
docs/dev/index.rst Normal file
View File

@ -0,0 +1,15 @@
=======================
Developer documentation
=======================
.. toctree::
:maxdepth: 1
quickstart
contribution_guide
engine_overview
search_api
plugins
translation
makefile
reST

221
docs/dev/makefile.rst Normal file
View File

@ -0,0 +1,221 @@
.. _makefile:
================
Makefile Targets
================
.. _gnu-make: https://www.gnu.org/software/make/manual/make.html#Introduction
.. sidebar:: build environment
Before looking deeper at the targets, first read about :ref:`makefile setup`
and :ref:`make pyenv`.
With the aim of simplifying development cycles, a ``Makefile`` based
boilerplate was added with :pull:`1756`. If you are not familiar with
Makefiles, we recommend reading the gnu-make_ introduction.
The usage is simple, just type ``make {target-name}`` to *build* a target.
Calling the ``help`` target gives a first overview::
$ make help
test - run developer tests
docs - build documentation
docs-live - autobuild HTML documentation while editing
run - run developer instance
install - developer install (./local)
uninstall - uninstall (./local)
gh-pages - build docs & deploy on gh-pages branch
clean - drop builds and environments
...
.. contents:: Contents
:depth: 2
:local:
:backlinks: entry
.. _makefile setup:
Setup
=====
.. _git stash: https://git-scm.com/docs/git-stash
The main setup is done in the :origin:`Makefile`::
export GIT_URL=https://github.com/asciimoo/searx
export SEARX_URL=https://searx.me
export DOCS_URL=https://asciimoo.github.io/searx
.. sidebar:: fork & upstream
Commit changes in your (local) branch, fork or whatever, but do not push them
upstream / `git stash`_ is your friend.
:GIT_URL: Change this to point to your searx fork.
:SEARX_URL: Change this to point to your searx instance.
:DOCS_URL: If you host your own (branded) documentation, change this URL.
.. _make pyenv:
Python environment
==================
.. sidebar:: activate environment
``source ./local/py3/bin/activate``
With the Makefile we no longer need to build up the virtualenv manually (as
described in the :ref:`devquickstart` guide). Jump into your git working tree
and run ``make pyenv``:
.. code:: sh
$ cd ~/searx-clone
$ make pyenv
PYENV usage: source ./local/py3/bin/activate
...
The ``pyenv`` target builds up a development environment (aka virtualenv) in
``./local/py3/``. To make a *developer install* of searx (:origin:`setup.py`)
into this environment, use the make target ``install``:
.. code:: sh
$ make install
PYENV usage: source ./local/py3/bin/activate
PYENV using virtualenv from ./local/py3
PYENV install .
You never have to think about intermediate targets like ``pyenv`` or
``install``; the ``Makefile`` chains them as prerequisites. Just run your main
target.
.. sidebar:: drop environment
To get rid of the existing environment before a re-build, use the
:ref:`clean target <make clean>` first.
If you think something has gone wrong with your ./local environment, or you
change the :origin:`setup.py` file (or the requirements listed in
:origin:`requirements-dev.txt` and :origin:`requirements.txt`), you have to
call :ref:`make clean`.
.. _make run:
``make run``
============
To get a developer instance up and running, simply call ``make run``. This
enables the *debug* option in :origin:`searx/settings.yml`, starts a
``./searx/webapp.py`` instance, disables the *debug* option again and opens the
URL in your favorite web browser (:man:`xdg-open`):
.. code:: sh
$ make run
PYENV usage: source ./local/py3/bin/activate
PYENV install .
./local/py3/bin/python ./searx/webapp.py
...
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)
...
.. _make clean:
``make clean``
==============
Drops all intermediate files and all builds, but keeps the sources untouched.
Includes the target ``pyclean``, which drops the ./local environment. Before
calling ``make clean``, stop all processes that use the :ref:`make pyenv`
environment.
.. code:: sh
$ make clean
CLEAN pyclean
CLEAN clean
.. _make docs:
``make docs docs-live docs-clean``
==================================
We describe the usage of the ``doc*`` targets in the :ref:`How to contribute /
Documentation <contrib docs>` section. If you want to edit the documentation,
read our :ref:`make docs-live` section. If you are working on your own brand,
adjust your :ref:`Makefile setup <makefile setup>`.
.. _make gh-pages:
``make gh-pages``
=================
To deploy on github.io first adjust your :ref:`Makefile setup <makefile
setup>`. For any further read :ref:`deploy on github.io`.
.. _make test:
``make test``
=============
Runs a series of tests: ``test.pep8``, ``test.unit``, ``test.robot`` and does
additional :ref:`pylint checks <make pylint>`. You can run tests selectively,
e.g.:
.. code:: sh
$ make test.pep8 test.unit
. ./local/py3/bin/activate; ./manage.sh pep8_check
[!] Running pep8 check
. ./local/py3/bin/activate; ./manage.sh unit_tests
[!] Running unit tests
.. _make pylint:
``make pylint``
===============
.. _Pylint: https://www.pylint.org/
Before committing, it is recommended to do some (more) linting. Pylint_ is
known as one of the best source-code, bug and quality checkers for the Python
programming language. Pylint_ is not yet a quality gate within our searx
project (as :ref:`test.pep8 <make test>` is), but Pylint_ can help to improve
code quality anyway. The pylint profile we use in the searx project is found
in the project's root folder: :origin:`.pylintrc`.
Code quality is an ongoing process. Don't try to fix all messages from Pylint;
run Pylint and check whether your changed lines bring up new messages. If so,
fix them. This way code quality gets incrementally better and, if the day comes
when the linting is balanced out, we might decide to add Pylint as a quality
gate.
``make pybuild``
================
.. _PyPi: https://pypi.org/
.. _twine: https://twine.readthedocs.io/en/latest/
Build Python packages in ``./dist/py``.
.. code:: sh
$ make pybuild
...
BUILD pybuild
running sdist
running egg_info
...
$ ls ./dist/py/
searx-0.15.0-py3-none-any.whl searx-0.15.0.tar.gz
To upload packages to PyPi_, there is also an ``upload-pypi`` target. It needs
twine_ to be installed. Since you are not the owner of :pypi:`searx`, you will
never need the latter.

54
docs/dev/plugins.rst Normal file
View File

@ -0,0 +1,54 @@
.. _dev plugin:
=======
Plugins
=======
.. sidebar:: Further reading ..
- :ref:`plugins generic`
Plugins can extend or replace functionality of various components of searx.
Example plugin
==============
.. code:: python
name = 'Example plugin'
description = 'This plugin extends the suggestions with the word "example"'
default_on = False # disabled by default
js_dependencies = tuple() # optional, list of static js files
css_dependencies = tuple() # optional, list of static css files
# attach callback to the post search hook
# request: flask request object
# ctx: the whole local context of the post search hook
def post_search(request, ctx):
ctx['search'].suggestions.add('example')
return True
Plugin entry points
===================
Entry points (hooks) define when a plugin runs. Right now only three hooks are
implemented, so feel free to implement a new hook if it fits the behaviour of
your plugin better.
Pre search hook
---------------
Runs BEFORE the search request. Function to implement: ``pre_search``
Post search hook
----------------
Runs AFTER the search request. Function to implement: ``post_search``
Result hook
-----------
Runs when a new result is added to the result list. Function to implement:
``on_result``
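For orientation, a minimal sketch of a plugin module using the result hook. The
module layout follows the example plugin above; the ``(request, search, result)``
signature is an assumption and should be checked against the built-in plugins in
:origin:`searx/plugins`.

.. code:: python

   name = 'Example result hook'
   description = 'Rewrites plain http result links to https'
   default_on = False

   # attach callback to the result hook
   # request: flask request object
   # result: result dict as produced by an engine (contains e.g. 'url')
   def on_result(request, search, result):
       url = result.get('url', '')
       if url.startswith('http://'):
           result['url'] = 'https://' + url[len('http://'):]
       return True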

132
docs/dev/quickstart.rst Normal file
View File

@ -0,0 +1,132 @@
.. _devquickstart:
======================
Development Quickstart
======================
.. sidebar:: :ref:`makefile`
For additional developer purposes there are the :ref:`makefile` targets.
This quickstart guide gets your environment set up with searx. Furthermore, it
gives a short introduction to the ``manage.sh`` script.
How to setup your development environment
=========================================
.. sidebar:: :ref:`make pyenv <make pyenv>`
Alternatively use the :ref:`make pyenv`.
First, clone the source code of searx to the desired folder. In this case the
source is cloned to ``~/myprojects/searx``. Then create and activate the
searx-ve virtualenv and install the required packages using ``manage.sh``.
.. code:: sh
cd ~/myprojects
git clone https://github.com/asciimoo/searx.git
cd searx
virtualenv searx-ve
. ./searx-ve/bin/activate
./manage.sh update_dev_packages
How to run tests
================
.. sidebar:: :ref:`make test.unit <make test>`
Alternatively use the ``test.pep8``, ``test.unit``, ``test.robot`` targets.
Tests can be run using the ``manage.sh`` script. The following tests and
checks are available:
- Unit tests
- Selenium tests
- PEP8 validation
- Unit test coverage check
For example unit tests are run with the command below:
.. code:: sh
./manage.sh unit_tests
For further test options, please consult the help of the ``manage.sh`` script or
read :ref:`make test`.
How to compile styles and javascript
====================================
.. _less: http://lesscss.org/
.. _NodeJS: https://nodejs.org
How to build styles
-------------------
Less_ is required to build the styles of searx. Less_ can be installed using
either NodeJS_ or Apt.
.. code:: sh
sudo -H apt-get install nodejs
sudo -H npm install -g less
OR
.. code:: sh
sudo -H apt-get install node-less
After satisfying the requirements, styles can be built using ``manage.sh``:
.. code:: sh
./manage.sh styles
How to build the source of the themes
=====================================
.. _grunt: https://gruntjs.com/
Grunt_ must be installed in order to build the javascript sources. It depends on
NodeJS, so first Node has to be installed.
.. code:: sh
sudo -H apt-get install nodejs
make node.env
After installing grunt, the files can be built using the following command:
.. code:: sh
make themes
Tips for debugging/development
==============================
.. sidebar:: :ref:`make run`
Makefile target ``run`` already enables debug option for your developer
session / see :ref:`make run`.
Turn on debug logging
Whether you are working on a new engine or trying to eliminate a bug, it is
always a good idea to turn on debug logging. When debug logging is enabled a
stack trace appears, instead of the cryptic ``Internal Server Error``
message. It can be turned on by changing ``debug: False`` to ``debug: True`` in
:origin:`settings.yml <searx/settings.yml>`.
.. sidebar:: :ref:`make test`
Alternatively use the :ref:`make test` targets.
Run ``./manage.sh tests`` before creating a PR.
A failing build on Travis is commonly caused by the PEP8 checks, so a new
commit must be created containing these format fixes. This phase can be skipped
if ``./manage.sh tests`` is run locally before creating a PR.

1428
docs/dev/reST.rst Normal file

File diff suppressed because it is too large Load Diff

120
docs/dev/search_api.rst Normal file
View File

@ -0,0 +1,120 @@
.. _search API:
==========
Search API
==========
The search supports both ``GET`` and ``POST``.
Furthermore, two endpoints ``/`` and ``/search`` are available for querying.
``GET /``
``GET /search``
Parameters
==========
.. sidebar:: Further reading ..
- :ref:`engines-dev`
- :ref:`settings.yml`
- :ref:`engines generic`
``q`` : required
The search query. This string is passed to external search services. Thus,
searx supports the syntax of each search service. For example, ``site:github.com
searx`` is a valid query for Google. However, if simply the query above is
passed to any search engine which does not filter its results based on this
syntax, you might not get the results you wanted.
See more at :ref:`search-syntax`
``categories`` : optional
Comma separated list, specifies the active search categories
``engines`` : optional
Comma separated list, specifies the active search engines.
``lang`` : default ``all``
Code of the language.
``pageno`` : default ``1``
Search page number.
``time_range`` : optional
[ ``day``, ``month``, ``year`` ]
Time range of search for engines which support it. See if an engine supports
time range search in the preferences page of an instance.
``format`` : optional
[ ``json``, ``csv``, ``rss`` ]
Output format of results.
``results_on_new_tab`` : default ``0``
[ ``0``, ``1`` ]
Open search results on new tab.
``image_proxy`` : default ``False``
[ ``True``, ``False`` ]
Proxy image results through searx.
``autocomplete`` : default *empty*
[ ``google``, ``dbpedia``, ``duckduckgo``, ``startpage``, ``wikipedia`` ]
Service which completes words as you type.
``safesearch`` : default ``None``
[ ``0``, ``1``, ``None`` ]
Filter search results of engines which support safe search. See if an engine
supports safe search in the preferences page of an instance.
``theme`` : default ``oscar``
[ ``oscar``, ``simple``, ``legacy``, ``pix-art``, ``courgette`` ]
Theme of instance.
Please note that the available themes depend on the instance. It is possible
that an instance administrator has deleted, created or renamed themes on
his/her instance.
See the available options in the preferences page of the instance.
``oscar-style`` : default ``logicodev``
[ ``pointhi``, ``logicodev`` ]
Style of Oscar theme. It is only parsed if the theme of an instance is
``oscar``.
Please note that the available styles depend on the instance. It is possible
that an instance administrator has deleted, created or renamed styles on
his/her instance. See the available options in the preferences page of the
instance.
``enabled_plugins`` : optional
List of enabled plugins.
:default: ``HTTPS_rewrite``, ``Self_Informations``,
``Search_on_category_select``, ``Tracker_URL_remover``
:values: [ ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
``Search_on_category_select`` ]
``disabled_plugins`` : optional
List of disabled plugins.
:default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``
:values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
``Search_on_category_select``
``enabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
List of enabled engines.
``disabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
List of disabled engines.
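For illustration, a small Python sketch of querying the JSON output of an
instance with the requests library. The instance URL is a placeholder, the
queried instance must enable the ``json`` format, and the exact shape of the
answer can differ between searx versions.

.. code:: python

   import requests

   # placeholder instance URL -- replace with an instance that enables the json format
   instance = 'https://searx.example.org'

   resp = requests.get(instance + '/search',
                       params={'q': 'searx', 'format': 'json',
                               'lang': 'en', 'pageno': 1})

   # the shape of the answer depends on the searx version of the instance
   for result in resp.json().get('results', []):
       print(result.get('title'), '->', result.get('url'))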

10
docs/dev/svg_image.svg Normal file
View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- originate: https://commons.wikimedia.org/wiki/File:Variable_Resistor.svg -->
<svg xmlns="http://www.w3.org/2000/svg"
version="1.1" baseProfile="full"
width="70px" height="40px" viewBox="0 0 700 400">
<line x1="0" y1="200" x2="700" y2="200" stroke="black" stroke-width="20px"/>
<rect x="100" y="100" width="500" height="200" fill="white" stroke="black" stroke-width="20px"/>
<line x1="180" y1="370" x2="500" y2="50" stroke="black" stroke-width="15px"/>
<polygon points="585 0 525 25 585 50" transform="rotate(135 525 25)"/>
</svg>


71
docs/dev/translation.rst Normal file
View File

@ -0,0 +1,71 @@
.. _translation:
===========
Translation
===========
.. _searx@transifex: https://www.transifex.com/asciimoo/searx/
Translation currently takes place on `searx@transifex`_
Requirements
============
* Transifex account
* Installed CLI tool of Transifex
Init Transifex project
======================
After installing ``transifex`` using pip, run the following command to
initialize the project.
.. code:: sh
tx init # Transifex instance: https://www.transifex.com/asciimoo/searx/
After ``$HOME/.transifexrc`` is created, get a Transifex API key and insert it
into the configuration file.
Create a configuration file for ``tx`` named ``$HOME/.tx/config``.
.. code:: ini
[main]
host = https://www.transifex.com
[searx.messagespo]
file_filter = searx/translations/<lang>/LC_MESSAGES/messages.po
source_file = messages.pot
source_lang = en
type = PO
Then run ``tx set``:
.. code:: shell
tx set --auto-local -r searx.messagespo 'searx/translations/<lang>/LC_MESSAGES/messages.po' \
--source-lang en --type PO --source-file messages.pot --execute
Update translations
===================
To retrieve the latest translations, pull it from Transifex.
.. code:: sh
tx pull -a
Then check the new languages. If not enough strings are translated, delete
those folders, because they should not be compiled. Call the command below to
compile the ``.po`` files.
.. code:: shell
pybabel compile -d searx/translations
After the compilation is finished commit the ``.po`` and ``.mo`` files and
create a PR.

34
docs/index.rst Normal file
View File

@ -0,0 +1,34 @@
================
Welcome to searx
================
Search without being tracked.
.. sidebar:: Features
- Self hosted
- No user tracking
- No user profiling
- About 70 supported search engines
- Easy integration with any search engine
- Cookies are not used by default
- Secure, encrypted connections (HTTPS/SSL)
- Hosted by organizations, such as *La Quadrature du Net*, which promote
digital rights
Searx is a free internet metasearch engine which aggregates results from more
than 70 search services. Users are neither tracked nor profiled. Additionally,
searx can be used over Tor for online anonymity.
Get started with searx by using one of the Searx-instances_. If you don't trust
anyone, you can set up your own, see :ref:`installation`.
.. toctree::
:maxdepth: 2
user/index
admin/index
dev/index
blog/index
.. _Searx-instances: https://searx.space

BIN
docs/static/img/searx_logo_small.png vendored Normal file

Binary file not shown.


9
docs/user/index.rst Normal file
View File

@ -0,0 +1,9 @@
==================
User documentation
==================
.. toctree::
:maxdepth: 1
search_syntax
own-instance

View File

@ -0,0 +1,77 @@
===========================
Why use a private instance?
===========================
"Is it worth to run my own instance?" is a common question among searx users.
Before answering this question, see what options a searx user has.
Public instances are open to everyone who has access to their URL. Usually,
these are operated by unknown parties (from the users' point of view). Private
instances can be used by a select group of people, for example a searx instance
for a group of friends or for a company, accessible through a VPN. It can also
be a single-user instance running on the user's laptop.
To gain more insight into how these instances work, let's dive into how searx
protects its users.
How does searx protect privacy?
===============================
Searx protects the privacy of its users in multiple ways regardless of the type
of the instance (private, public). Removal of private data from search requests
comes in three forms:
1. removal of private data from requests going to search services
2. not forwarding anything from third party services through search services
(e.g. advertisement)
3. removal of private data from requests going to the result pages
Removing private data means not sending cookies to external search engines and
generating a random browser profile for every request. Thus, it does not matter
if a public or private instance handles the request, because it is anonymized in
both cases. The IP address seen by the search service will be the IP of the
instance, but searx can be configured to use a proxy or Tor. `Result proxy
<https://github.com/asciimoo/morty>`__ is supported, too.
Unlike most search services, searx does not serve ads or tracking content, so
private data is not forwarded to third parties who might monetize it. Besides
protecting users from search services, both referring page and search query are
hidden from visited result pages.
What are the consequences of using public instances?
----------------------------------------------------
If someone uses a public instance, he/she has to trust the administrator of that
instance. This means that the user of the public instance does not know whether
his/her requests are logged, aggregated and sent or sold to a third party.
Also, public instances without proper protection are more vulnerable to abuse
of the search services; in this case the external service in exchange returns
CAPTCHAs or bans the IP of the instance. Thus, search requests return fewer
results.
I see. What about private instances?
------------------------------------
If users run their own instances, everything is in their control: the source
code, logging settings and private data. Unknown instance administrators do not
have to be trusted.
Furthermore, as the default settings of their instance are editable, there is
no need to use cookies to tailor searx to their needs, so preferences will not
be reset to defaults when clearing browser cookies. As settings are stored on
their computer, they will not be accessible to others as long as their computer
is not compromised.
Conclusion
==========
Always use an instance which is operated by people you trust. The privacy
features of searx are available to users no matter what kind of instance they
use.
If someone is on the go or just wants to try searx for the first time, public
instances are the best choice. Additionally, public instances make the world a
better place, because those who cannot or do not want to run an instance still
have access to a privacy-respecting search service.

View File

@ -0,0 +1,3 @@
:orphan:
This page has been moved to `searx.space <https://searx.space/>`__

View File

@ -0,0 +1,42 @@
.. _search-syntax:
=============
Search syntax
=============
Searx allows you to modify the default categories, engines and search language
via the search query.
Prefix: ``!``
to set category/engine
Prefix: ``:``
to set language
Prefix: ``?``
to add engines and categories to the currently selected categories
Abbreviations of the engines and languages are also accepted. Engine/category
modifiers are chainable and inclusive (e.g. with :search:`!it !ddg !wp qwer
<?q=%21it%20%21ddg%20%21wp%20qwer>` search in IT category **and** duckduckgo
**and** wikipedia for ``qwer``).
See the :search:`/preferences page <preferences>` for the list of engines,
categories and languages.
Examples
========
Search in wikipedia for ``qwer``:
- :search:`!wp qwer <?q=%21wp%20qwer>` or
- :search:`!wikipedia qwer <?q=%21wikipedia%20qwer>`
Image search:
- :search:`!images Cthulhu <?q=%21images%20Cthulhu>`
Custom language in wikipedia:
- :search:`:hu !wp hackerspace <?q=%3Ahu%20%21wp%20hackerspace>`

100
manage.sh
View File

@ -12,6 +12,7 @@ PYTHONPATH="$BASE_DIR"
SEARX_DIR="$BASE_DIR/searx" SEARX_DIR="$BASE_DIR/searx"
ACTION="$1" ACTION="$1"
. "${BASE_DIR}/utils/brand.env"
# #
# Python # Python
@ -20,12 +21,12 @@ ACTION="$1"
update_packages() { update_packages() {
pip install --upgrade pip pip install --upgrade pip
pip install --upgrade setuptools pip install --upgrade setuptools
pip install -r "$BASE_DIR/requirements.txt" pip install -Ur "$BASE_DIR/requirements.txt"
} }
update_dev_packages() { update_dev_packages() {
update_packages update_packages
pip install -r "$BASE_DIR/requirements-dev.txt" pip install -Ur "$BASE_DIR/requirements-dev.txt"
} }
install_geckodriver() { install_geckodriver() {
@ -72,45 +73,6 @@ locales() {
pybabel compile -d "$SEARX_DIR/translations" pybabel compile -d "$SEARX_DIR/translations"
} }
update_useragents() {
echo '[!] Updating user agent versions'
python utils/fetch_firefox_version.py
}
pep8_check() {
echo '[!] Running pep8 check'
# ignored rules:
# E402 module level import not at top of file
# W503 line break before binary operator
pep8 --exclude=searx/static --max-line-length=120 --ignore "E402,W503" "$SEARX_DIR" "$BASE_DIR/tests"
}
unit_tests() {
echo '[!] Running unit tests'
python -m nose2 -s "$BASE_DIR/tests/unit"
}
py_test_coverage() {
echo '[!] Running python test coverage'
PYTHONPATH="`pwd`" python -m nose2 -C --log-capture --with-coverage --coverage "$SEARX_DIR" -s "$BASE_DIR/tests/unit" \
&& coverage report \
&& coverage html
}
robot_tests() {
echo '[!] Running robot tests'
PYTHONPATH="`pwd`" python "$SEARX_DIR/testing.py" robot
}
tests() {
set -e
pep8_check
unit_tests
install_geckodriver
robot_tests
set +e
}
# #
# Web # Web
@ -137,36 +99,6 @@ npm_packages() {
npm install npm install
} }
build_style() {
npm_path_setup
lessc --clean-css="--s1 --advanced --compatibility=ie9" "$BASE_DIR/searx/static/$1" "$BASE_DIR/searx/static/$2"
}
styles() {
npm_path_setup
echo '[!] Building legacy style'
build_style themes/legacy/less/style.less themes/legacy/css/style.css
build_style themes/legacy/less/style-rtl.less themes/legacy/css/style-rtl.css
echo '[!] Building courgette style'
build_style themes/courgette/less/style.less themes/courgette/css/style.css
build_style themes/courgette/less/style-rtl.less themes/courgette/css/style-rtl.css
echo '[!] Building pix-art style'
build_style themes/pix-art/less/style.less themes/pix-art/css/style.css
echo '[!] Building bootstrap style'
build_style less/bootstrap/bootstrap.less css/bootstrap.min.css
}
grunt_build() {
npm_path_setup
echo '[!] Grunt build : oscar theme'
grunt --gruntfile "$SEARX_DIR/static/themes/oscar/gruntfile.js"
echo '[!] Grunt build : simple theme'
grunt --gruntfile "$SEARX_DIR/static/themes/simple/gruntfile.js"
}
docker_build() { docker_build() {
# Check if it is a git repository # Check if it is a git repository
if [ ! -d .git ]; then if [ ! -d .git ]; then
@ -191,8 +123,9 @@ docker_build() {
SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); $3=substr($3, 2); print}') SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); $3=substr($3, 2); print}')
# add the suffix "-dirty" if the repository has uncommited change # add the suffix "-dirty" if the repository has uncommited change
# /!\ HACK for searx/searx: ignore searx/brand.py and utils/brand.env
git update-index -q --refresh git update-index -q --refresh
if [ ! -z "$(git diff-index --name-only HEAD --)" ]; then if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'searx/brand.py' | grep -v 'utils/brand.env')" ]; then
SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty" SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
fi fi
@ -213,18 +146,18 @@ docker_build() {
fi fi
# define the docker image name # define the docker image name
# /!\ HACK to get the user name /!\ GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
GITHUB_USER=$(git remote get-url origin | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
SEARX_IMAGE_NAME="${GITHUB_USER:-searx}/searx" SEARX_IMAGE_NAME="${GITHUB_USER:-searx}/searx"
# build Docker image # build Docker image
echo "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}" echo "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
sudo docker build \ sudo docker build \
--build-arg GIT_URL="${GIT_URL}" \
--build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \ --build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
--build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \ --build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
--build-arg LABEL_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \ --build-arg LABEL_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \
--build-arg LABEL_VCS_URL=$(git remote get-url origin) \ --build-arg LABEL_VCS_URL="${GIT_URL}" \
--build-arg TIMESTAMP_SETTINGS=$(git log -1 --format="%cd" --date=unix -- searx/settings.yml) \ --build-arg TIMESTAMP_SETTINGS=$(git log -1 --format="%cd" --date=unix -- searx/settings.yml) \
--build-arg TIMESTAMP_UWSGI=$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini) \ --build-arg TIMESTAMP_UWSGI=$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini) \
-t ${SEARX_IMAGE_NAME}:latest -t ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION} . -t ${SEARX_IMAGE_NAME}:latest -t ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION} .
@ -253,22 +186,17 @@ Commands
update_dev_packages - Check & update development and production dependency changes update_dev_packages - Check & update development and production dependency changes
install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests) install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
npm_packages - Download & install npm dependencies npm_packages - Download & install npm dependencies
update_useragents - Update useragents.json with the most recent versions of Firefox
Build Build
----- -----
locales - Compile locales locales - Compile locales
styles - Build less files
grunt_build - Build files for themes
docker_build - Build Docker image
Tests Environment:
----- GIT_URL: ${GIT_URL}
unit_tests - Run unit tests ISSUE_URL: ${ISSUE_URL}
pep8_check - Pep8 validation SEARX_URL: ${SEARX_URL}
robot_tests - Run selenium tests DOCS_URL: ${DOCS_URL}
tests - Run all python tests (pep8, unit, robot_tests) PUBLIC_INSTANCES: ${PUBLIC_INSTANCES}
py_test_coverage - Unit test coverage
" "
} }

View File

@ -1,10 +1,17 @@
pallets-sphinx-themes
Sphinx
sphinx-issues
mock==2.0.0 mock==2.0.0
nose2[coverage_plugin] nose2[coverage_plugin]
cov-core==1.15.0 cov-core==1.15.0
pep8==1.7.0 pep8==1.7.0
pylint
plone.testing==5.0.0 plone.testing==5.0.0
splinter==0.11.0 splinter==0.11.0
transifex-client==0.12.2 transifex-client==0.12.2
unittest2==1.1.0 unittest2==1.1.0
zope.testrunner==4.5.1 zope.testrunner==4.5.1
selenium==3.141.0 selenium==3.141.0
linuxdoc @ git+http://github.com/return42/linuxdoc.git
sphinx-jinja
sphinx-tabs

View File

@ -1,12 +1,12 @@
certifi==2019.3.9 certifi==2020.4.5.1
babel==2.7.0 babel==2.7.0
flask-babel==0.12.2 flask-babel==1.0.0
flask==1.0.2 flask==1.1.2
idna==2.8 idna==2.9
jinja2==2.10.1 jinja2==2.11.1
lxml==4.3.3 lxml==4.5.0
pygments==2.1.3 pygments==2.1.3
pyopenssl==19.0.0 pyopenssl==19.1.0
python-dateutil==2.8.0 python-dateutil==2.8.0
pyyaml==5.1 pyyaml==5.3.1
requests[socks]==2.22.0 requests[socks]==2.23.0

View File

@ -38,6 +38,7 @@ def check_settings_yml(file_name):
else: else:
return None return None
# find location of settings.yml # find location of settings.yml
if 'SEARX_SETTINGS_PATH' in environ: if 'SEARX_SETTINGS_PATH' in environ:
# if possible set path to settings using the # if possible set path to settings using the

View File

@ -165,6 +165,14 @@ def startpage(query, lang):
return [] return []
def swisscows(query, lang):
# swisscows autocompleter
url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'
resp = loads(get(url.format(query=urlencode({'query': query}))).text)
return resp
def qwant(query, lang): def qwant(query, lang):
# qwant autocompleter (additional parameter : lang=en_en&count=xxx ) # qwant autocompleter (additional parameter : lang=en_en&count=xxx )
url = 'https://api.qwant.com/api/suggest?{query}' url = 'https://api.qwant.com/api/suggest?{query}'
@ -196,6 +204,7 @@ backends = {'dbpedia': dbpedia,
'duckduckgo': duckduckgo, 'duckduckgo': duckduckgo,
'google': google, 'google': google,
'startpage': startpage, 'startpage': startpage,
'swisscows': swisscows,
'qwant': qwant, 'qwant': qwant,
'wikipedia': wikipedia 'wikipedia': wikipedia
} }

5
searx/brand.py Normal file
View File

@ -0,0 +1,5 @@
GIT_URL = 'https://github.com/asciimoo/searx'
ISSUE_URL = 'https://github.com/asciimoo/searx/issues'
SEARX_URL = 'https://searx.me'
DOCS_URL = 'https://asciimoo.github.io/searx'
PUBLIC_INSTANCES = 'https://searx.space'

File diff suppressed because it is too large Load Diff

View File

@ -1,14 +1,11 @@
{ {
"versions": [ "versions": [
"70.0.1", "75.0",
"70.0", "74.0.1",
"69.0.3", "74.0"
"69.0.2",
"69.0.1",
"69.0"
], ],
"os": [ "os": [
"Windows NT 10; WOW64", "Windows NT 10.0; WOW64",
"X11; Linux x86_64" "X11; Linux x86_64"
], ],
"ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}" "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"

View File

@ -54,7 +54,8 @@ engine_default_args = {'paging': False,
'suspend_end_time': 0, 'suspend_end_time': 0,
'continuous_errors': 0, 'continuous_errors': 0,
'time_range_support': False, 'time_range_support': False,
'offline': False} 'offline': False,
'tokens': []}
def load_engine(engine_data): def load_engine(engine_data):
@ -160,7 +161,7 @@ def to_percentage(stats, maxvalue):
return stats return stats
def get_engines_stats(): def get_engines_stats(preferences):
# TODO refactor # TODO refactor
pageloads = [] pageloads = []
engine_times = [] engine_times = []
@ -171,8 +172,12 @@ def get_engines_stats():
max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0 # noqa max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0 # noqa
for engine in engines.values(): for engine in engines.values():
if not preferences.validate_token(engine):
continue
if engine.stats['search_count'] == 0: if engine.stats['search_count'] == 0:
continue continue
results_num = \ results_num = \
engine.stats['result_count'] / float(engine.stats['search_count']) engine.stats['result_count'] / float(engine.stats['search_count'])

View File

@ -89,8 +89,7 @@ def response(resp):
'content': content}) 'content': content})
try: try:
result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()')) result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
result_len_container = utils.to_string(result_len_container)
if "-" in result_len_container: if "-" in result_len_container:
# Remove the part "from-to" for paginated request ... # Remove the part "from-to" for paginated request ...
result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:] result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
@ -102,7 +101,7 @@ def response(resp):
logger.debug('result error :\n%s', e) logger.debug('result error :\n%s', e)
pass pass
if _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len: if result_len and _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
return [] return []
results.append({'number_of_results': result_len}) results.append({'number_of_results': result_len})
@ -111,13 +110,18 @@ def response(resp):
# get supported languages from their site # get supported languages from their site
def _fetch_supported_languages(resp): def _fetch_supported_languages(resp):
supported_languages = [] lang_tags = set()
dom = html.fromstring(resp.text)
options = eval_xpath(dom, '//div[@id="limit-languages"]//input')
for option in options:
code = eval_xpath(option, './@id')[0].replace('_', '-')
if code == 'nb':
code = 'no'
supported_languages.append(code)
return supported_languages setmkt = re.compile('setmkt=([^&]*)')
dom = html.fromstring(resp.text)
lang_links = eval_xpath(dom, "//li/a[contains(@href, 'setmkt')]")
for a in lang_links:
href = eval_xpath(a, './@href')[0]
match = setmkt.search(href)
l_tag = match.groups()[0]
_lang, _nation = l_tag.split('-', 1)
l_tag = _lang.lower() + '-' + _nation.upper()
lang_tags.add(l_tag)
return list(lang_tags)

View File

@ -18,6 +18,8 @@ import re
from searx.url_utils import urlencode from searx.url_utils import urlencode
from searx.utils import match_language from searx.utils import match_language
from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
# engine dependent config # engine dependent config
categories = ['images'] categories = ['images']
paging = True paging = True
@ -103,22 +105,3 @@ def response(resp):
continue continue
return results return results
# get supported languages from their site
def _fetch_supported_languages(resp):
supported_languages = []
dom = html.fromstring(resp.text)
regions_xpath = '//div[@id="region-section-content"]' \
+ '//ul[@class="b_vList"]/li/a/@href'
regions = dom.xpath(regions_xpath)
for region in regions:
code = re.search('setmkt=[^\&]+', region).group()[7:]
if code == 'nb-NO':
code = 'no-NO'
supported_languages.append(code)
return supported_languages

View File

@ -15,9 +15,10 @@ from datetime import datetime
from dateutil import parser from dateutil import parser
from lxml import etree from lxml import etree
from searx.utils import list_get, match_language from searx.utils import list_get, match_language
from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
from searx.url_utils import urlencode, urlparse, parse_qsl from searx.url_utils import urlencode, urlparse, parse_qsl
from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
# engine dependent config # engine dependent config
categories = ['news'] categories = ['news']
paging = True paging = True
@ -58,6 +59,7 @@ def _get_url(query, language, offset, time_range):
offset=offset, offset=offset,
interval=time_range_dict[time_range]) interval=time_range_dict[time_range])
else: else:
# e.g. setmkt=de-de&setlang=de
search_path = search_string.format( search_path = search_string.format(
query=urlencode({'q': query, 'setmkt': language}), query=urlencode({'q': query, 'setmkt': language}),
offset=offset) offset=offset)

View File

@ -12,10 +12,10 @@
from json import loads from json import loads
from lxml import html from lxml import html
from searx.engines.bing_images import _fetch_supported_languages, supported_languages_url
from searx.url_utils import urlencode from searx.url_utils import urlencode
from searx.utils import match_language from searx.utils import match_language
from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
categories = ['videos'] categories = ['videos']
paging = True paging = True
@ -67,6 +67,10 @@ def request(query, params):
if params['time_range'] in time_range_dict: if params['time_range'] in time_range_dict:
params['url'] += time_range_string.format(interval=time_range_dict[params['time_range']]) params['url'] += time_range_string.format(interval=time_range_dict[params['time_range']])
# bing videos did not like "older" versions < 70.0.1 when selectin other
# languages then 'en' .. very strange ?!?!
params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:73.0.1) Gecko/20100101 Firefox/73.0.1'
return params return params

View File

@ -0,0 +1,12 @@
"""
Dummy Offline
@results one result
@stable yes
"""
def search(query, request_params):
return [{
'result': 'this is what you get',
}]

54
searx/engines/etools.py Normal file
View File

@ -0,0 +1,54 @@
"""
eTools (Web)
@website https://www.etools.ch
@provide-api no
@using-api no
@results HTML
@stable no (HTML can change)
@parse url, title, content
"""
from lxml import html
from searx.engines.xpath import extract_text
from searx.url_utils import quote
from searx.utils import eval_xpath
categories = ['general']
paging = False
language_support = False
safesearch = True
base_url = 'https://www.etools.ch'
search_path = '/searchAdvancedSubmit.do'\
'?query={search_term}'\
'&pageResults=20'\
'&safeSearch={safesearch}'
def request(query, params):
if params['safesearch']:
safesearch = 'true'
else:
safesearch = 'false'
params['url'] = base_url + search_path.format(search_term=quote(query), safesearch=safesearch)
return params
def response(resp):
results = []
dom = html.fromstring(resp.text)
for result in eval_xpath(dom, '//table[@class="result"]//td[@class="record"]'):
url = eval_xpath(result, './a/@href')[0]
title = extract_text(eval_xpath(result, './a//text()'))
content = extract_text(eval_xpath(result, './/div[@class="text"]//text()'))
results.append({'url': url,
'title': title,
'content': content})
return results

View File

@ -1,96 +0,0 @@
"""
Faroo (Web, News)
@website http://www.faroo.com
@provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
@using-api no
@results JSON
@stable yes
@parse url, title, content, publishedDate, img_src
"""
from json import loads
import datetime
from searx.utils import searx_useragent
from searx.url_utils import urlencode
# engine dependent config
categories = ['general', 'news']
paging = True
language_support = True
number_of_results = 10
# search-url
url = 'http://www.faroo.com/'
search_url = url + 'instant.json?{query}'\
'&start={offset}'\
'&length={number_of_results}'\
'&l={language}'\
'&src={categorie}'\
'&i=false'\
'&c=false'
search_category = {'general': 'web',
'news': 'news'}
# do search-request
def request(query, params):
offset = (params['pageno'] - 1) * number_of_results + 1
categorie = search_category.get(params['category'], 'web')
if params['language'] == 'all':
language = 'en'
else:
language = params['language'].split('-')[0]
# if language is not supported, put it in english
if language != 'en' and\
language != 'de' and\
language != 'zh':
language = 'en'
params['url'] = search_url.format(offset=offset,
number_of_results=number_of_results,
query=urlencode({'q': query}),
language=language,
categorie=categorie)
params['headers']['Referer'] = url
return params
# get response from search-request
def response(resp):
# HTTP-Code 429: rate limit exceeded
if resp.status_code == 429:
raise Exception("rate limit has been exceeded!")
results = []
search_res = loads(resp.text)
# return empty array if there are no results
if not search_res.get('results', {}):
return []
# parse results
for result in search_res['results']:
publishedDate = None
result_json = {'url': result['url'], 'title': result['title'],
'content': result['kwic']}
if result['news']:
result_json['publishedDate'] = \
datetime.datetime.fromtimestamp(result['date'] / 1000.0)
# append image result if image url is set
if result['iurl']:
result_json['template'] = 'videos.html'
result_json['thumbnail'] = result['iurl']
results.append(result_json)
# return results
return results

View File

@ -109,14 +109,22 @@ def response(resp):
else: else:
url = build_flickr_url(photo['ownerNsid'], photo['id']) url = build_flickr_url(photo['ownerNsid'], photo['id'])
results.append({'url': url, result = {
'title': title, 'url': url,
'img_src': img_src, 'img_src': img_src,
'thumbnail_src': thumbnail_src, 'thumbnail_src': thumbnail_src,
'content': content,
'author': author,
'source': source, 'source': source,
'img_format': img_format, 'img_format': img_format,
'template': 'images.html'}) 'template': 'images.html'
}
try:
result['author'] = author
result['title'] = title
result['content'] = content
except:
result['author'] = ''
result['title'] = ''
result['content'] = ''
results.append(result)
return results return results

View File

@ -72,6 +72,7 @@ def parse_album(hit):
result.update({'content': 'Released: {}'.format(year)}) result.update({'content': 'Released: {}'.format(year)})
return result return result
parse = {'lyric': parse_lyric, 'song': parse_lyric, 'artist': parse_artist, 'album': parse_album} parse = {'lyric': parse_lyric, 'song': parse_lyric, 'artist': parse_artist, 'album': parse_album}

View File

@ -14,6 +14,7 @@ import random
from json import loads from json import loads
from time import time from time import time
from lxml.html import fromstring from lxml.html import fromstring
from searx.poolrequests import get
from searx.url_utils import urlencode from searx.url_utils import urlencode
from searx.utils import eval_xpath from searx.utils import eval_xpath
@ -31,13 +32,9 @@ search_string = 'search?{query}'\
'&c=main'\ '&c=main'\
'&s={offset}'\ '&s={offset}'\
'&format=json'\ '&format=json'\
'&qh=0'\ '&langcountry={lang}'\
'&qlang={lang}'\
'&ff={safesearch}'\ '&ff={safesearch}'\
'&rxiec={rxieu}'\ '&rand={rxikd}'
'&ulse={ulse}'\
'&rand={rxikd}'\
'&dbez={dbez}'
# specific xpath variables # specific xpath variables
results_xpath = '//response//result' results_xpath = '//response//result'
url_xpath = './/url' url_xpath = './/url'
@ -46,9 +43,26 @@ content_xpath = './/sum'
supported_languages_url = 'https://gigablast.com/search?&rxikd=1' supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
extra_param = '' # gigablast requires a random extra parameter
# which can be extracted from the source code of the search page
def parse_extra_param(text):
global extra_param
param_lines = [x for x in text.splitlines() if x.startswith('var url=') or x.startswith('url=url+')]
extra_param = ''
for l in param_lines:
extra_param += l.split("'")[1]
extra_param = extra_param.split('&')[-1]
def init(engine_settings=None):
parse_extra_param(get('http://gigablast.com/search?c=main&qlangcountry=en-us&q=south&s=10').text)
# do search-request # do search-request
def request(query, params): def request(query, params):
print("EXTRAPARAM:", extra_param)
offset = (params['pageno'] - 1) * number_of_results offset = (params['pageno'] - 1) * number_of_results
if params['language'] == 'all': if params['language'] == 'all':
@ -67,14 +81,11 @@ def request(query, params):
search_path = search_string.format(query=urlencode({'q': query}), search_path = search_string.format(query=urlencode({'q': query}),
offset=offset, offset=offset,
number_of_results=number_of_results, number_of_results=number_of_results,
rxikd=int(time() * 1000),
rxieu=random.randint(1000000000, 9999999999),
ulse=random.randint(100000000, 999999999),
lang=language, lang=language,
safesearch=safesearch, rxikd=int(time() * 1000),
dbez=random.randint(100000000, 999999999)) safesearch=safesearch)
params['url'] = base_url + search_path params['url'] = base_url + search_path + '&' + extra_param
return params return params
@ -84,7 +95,11 @@ def response(resp):
results = [] results = []
# parse results # parse results
try:
response_json = loads(resp.text) response_json = loads(resp.text)
except:
parse_extra_param(resp.text)
raise Exception('extra param expired, please reload')
for result in response_json['results']: for result in response_json['results']:
# append result # append result

View File

@ -54,7 +54,7 @@ def request(query, params):
if params['language'] != 'all': if params['language'] != 'all':
language = match_language(params['language'], supported_languages, language_aliases).split('-')[0] language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
if language: if language:
params['url'] += '&lr=lang_' + language params['url'] += '&hl=' + language
return params return params

View File

@ -32,7 +32,7 @@ base_url = 'https://www.ina.fr'
search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}' search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
# specific xpath variables # specific xpath variables
results_xpath = '//div[contains(@class,"search-results--list")]/div[@class="media"]' results_xpath = '//div[contains(@class,"search-results--list")]//div[@class="media-body"]'
url_xpath = './/a/@href' url_xpath = './/a/@href'
title_xpath = './/h3[@class="h3--title media-heading"]' title_xpath = './/h3[@class="h3--title media-heading"]'
thumbnail_xpath = './/img/@src' thumbnail_xpath = './/img/@src'
@ -65,8 +65,11 @@ def response(resp):
videoid = result.xpath(url_xpath)[0] videoid = result.xpath(url_xpath)[0]
url = base_url + videoid url = base_url + videoid
title = p.unescape(extract_text(result.xpath(title_xpath))) title = p.unescape(extract_text(result.xpath(title_xpath)))
try:
thumbnail = extract_text(result.xpath(thumbnail_xpath)[0]) thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
if thumbnail[0] == '/': except:
thumbnail = ''
if thumbnail and thumbnail[0] == '/':
thumbnail = base_url + thumbnail thumbnail = base_url + thumbnail
d = extract_text(result.xpath(publishedDate_xpath)[0]) d = extract_text(result.xpath(publishedDate_xpath)[0])
d = d.split('/') d = d.split('/')

View File

@ -45,6 +45,8 @@ def request(query, params):
def response(resp): def response(resp):
results = [] results = []
response_data = loads(resp.text) response_data = loads(resp.text)
if not response_data:
return results
for result in response_data['results']: for result in response_data['results']:
url = _get_url(result) url = _get_url(result)

View File

@ -24,7 +24,7 @@ result_base_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
# do search-request # do search-request
def request(query, params): def request(query, params):
params['url'] = base_url + search_string.format(query=query) params['url'] = base_url + search_string.format(query=query.decode('utf-8'))
return params return params
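This OpenStreetMap fix and the scanr_structures one a little further down address the same detail: `SearchQuery` stores the query UTF-8 encoded as bytes, so an engine that interpolates it into a URL template or a JSON body has to decode it back to text first. A small illustration in plain Python, with no searx imports:

from json import dumps

query = u'z\u00fcrich'.encode('utf-8')   # what the engine receives: UTF-8 bytes

# dumps(query) would fail on Python 3 ("bytes is not JSON serializable"),
# so the request body is built from the decoded text instead.
body = dumps({'query': query.decode('utf-8'), 'searchField': 'ALL'})
print(body)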

View File

@ -50,6 +50,7 @@ def request(query, params):
language = match_language(params['language'], supported_languages, language_aliases) language = match_language(params['language'], supported_languages, language_aliases)
params['url'] += '&locale=' + language.replace('-', '_').lower() params['url'] += '&locale=' + language.replace('-', '_').lower()
params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'
return params return params

View File

@ -29,7 +29,7 @@ def request(query, params):
params['url'] = search_url params['url'] = search_url
params['method'] = 'POST' params['method'] = 'POST'
params['headers']['Content-type'] = "application/json" params['headers']['Content-type'] = "application/json"
params['data'] = dumps({"query": query, params['data'] = dumps({"query": query.decode('utf-8'),
"searchField": "ALL", "searchField": "ALL",
"sortDirection": "ASC", "sortDirection": "ASC",
"sortOrder": "RELEVANCY", "sortOrder": "RELEVANCY",

View File

@ -12,10 +12,14 @@
from json import loads from json import loads
from searx.url_utils import urlencode from searx.url_utils import urlencode
import requests
import base64
# engine dependent config # engine dependent config
categories = ['music'] categories = ['music']
paging = True paging = True
api_client_id = None
api_client_secret = None
# search-url # search-url
url = 'https://api.spotify.com/' url = 'https://api.spotify.com/'
@ -31,6 +35,16 @@ def request(query, params):
params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset) params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset)
r = requests.post(
'https://accounts.spotify.com/api/token',
data={'grant_type': 'client_credentials'},
headers={'Authorization': 'Basic ' + base64.b64encode(
"{}:{}".format(api_client_id, api_client_secret).encode('utf-8')
).decode('utf-8')}
)
j = loads(r.text)
params['headers'] = {'Authorization': 'Bearer {}'.format(j.get('access_token'))}
return params return params
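The Spotify engine now authenticates itself with the OAuth2 Client Credentials flow: the client id/secret pair (configured per instance, see the settings.yml change further down) is sent Base64-encoded in a Basic Authorization header to Spotify's token endpoint, and the returned access token is attached to the search request as a Bearer header. A sketch of just the token exchange, with placeholder credentials:

import base64
from json import loads
import requests

api_client_id = 'YOUR_CLIENT_ID'            # placeholders: a registered Spotify
api_client_secret = 'YOUR_CLIENT_SECRET'    # application provides the real values

# HTTP Basic auth value is base64("client_id:client_secret")
basic = base64.b64encode(
    '{}:{}'.format(api_client_id, api_client_secret).encode('utf-8')
).decode('utf-8')

r = requests.post(
    'https://accounts.spotify.com/api/token',
    data={'grant_type': 'client_credentials'},
    headers={'Authorization': 'Basic ' + basic},
)
token = loads(r.text).get('access_token')

# the actual search request then carries the token
headers = {'Authorization': 'Bearer {}'.format(token)}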

View File

@ -99,11 +99,14 @@ def response(resp):
if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content): if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
date_pos = content.find('...') + 4 date_pos = content.find('...') + 4
date_string = content[0:date_pos - 5] date_string = content[0:date_pos - 5]
published_date = parser.parse(date_string, dayfirst=True)
# fix content string # fix content string
content = content[date_pos:] content = content[date_pos:]
try:
published_date = parser.parse(date_string, dayfirst=True)
except ValueError:
pass
# check if search result starts with something like: "5 days ago ... " # check if search result starts with something like: "5 days ago ... "
elif re.match(r"^[0-9]+ days? ago \.\.\. ", content): elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
date_pos = content.find('...') + 4 date_pos = content.find('...') + 4
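The Startpage fix wraps `dateutil`'s parsing in a try/except so a snippet that merely looks like it starts with a date no longer aborts the whole result page; when `parser.parse()` rejects the string the result is kept without a publication date. Roughly:

from dateutil import parser

for date_string in ('5 Jan 2020', 'not a date at all'):
    published_date = None
    try:
        # dayfirst=True matches snippets such as "31 Dec 2019 ..."
        published_date = parser.parse(date_string, dayfirst=True)
    except ValueError:
        pass          # keep the result, just without a date
    print(date_string, '->', published_date)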

View File

@ -21,7 +21,8 @@ search_url = base_url + u'w/api.php?'\
'action=query'\ 'action=query'\
'&format=json'\ '&format=json'\
'&{query}'\ '&{query}'\
'&prop=extracts|pageimages'\ '&prop=extracts|pageimages|pageprops'\
'&ppprop=disambiguation'\
'&exintro'\ '&exintro'\
'&explaintext'\ '&explaintext'\
'&pithumbsize=300'\ '&pithumbsize=300'\
@ -79,12 +80,15 @@ def response(resp):
# wikipedia article's unique id # wikipedia article's unique id
# first valid id is assumed to be the requested article # first valid id is assumed to be the requested article
if 'pages' not in search_result['query']:
return results
for article_id in search_result['query']['pages']: for article_id in search_result['query']['pages']:
page = search_result['query']['pages'][article_id] page = search_result['query']['pages'][article_id]
if int(article_id) > 0: if int(article_id) > 0:
break break
if int(article_id) < 0: if int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {}):
return [] return []
title = page.get('title') title = page.get('title')
@ -96,6 +100,7 @@ def response(resp):
extract = page.get('extract') extract = page.get('extract')
summary = extract_first_paragraph(extract, title, image) summary = extract_first_paragraph(extract, title, image)
summary = summary.replace('() ', '')
# link to wikipedia article # link to wikipedia article
wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \ wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \
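The Wikipedia request now also asks for `pageprops` restricted to `disambiguation`: the MediaWiki API marks disambiguation pages with that key, and the engine returns no infobox for them (and bails out early when the query carries no `pages` at all). A small sketch of the detection against an illustrative API payload:

# illustrative payload shape for action=query&prop=pageprops&ppprop=disambiguation
search_result = {
    'query': {
        'pages': {
            '123': {'title': 'Python (programming language)', 'pageprops': {}},
            '456': {'title': 'Python', 'pageprops': {'disambiguation': ''}},
        }
    }
}

pages = search_result.get('query', {}).get('pages', {})
articles = [page['title'] for page in pages.values()
            if 'disambiguation' not in page.get('pageprops', {})]
print(articles)    # the disambiguation page is filtered out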

View File

@ -3,9 +3,11 @@
# this file is generated automatically by utils/update_search_languages.py # this file is generated automatically by utils/update_search_languages.py
language_codes = ( language_codes = (
(u"af-NA", u"Afrikaans", u"", u"Afrikaans"),
(u"ar-SA", u"العربية", u"", u"Arabic"), (u"ar-SA", u"العربية", u"", u"Arabic"),
(u"be-BY", u"Беларуская", u"", u"Belarusian"),
(u"bg-BG", u"Български", u"", u"Bulgarian"), (u"bg-BG", u"Български", u"", u"Bulgarian"),
(u"ca-ES", u"Català", u"", u"Catalan"), (u"ca-AD", u"Català", u"", u"Catalan"),
(u"cs-CZ", u"Čeština", u"", u"Czech"), (u"cs-CZ", u"Čeština", u"", u"Czech"),
(u"da-DK", u"Dansk", u"", u"Danish"), (u"da-DK", u"Dansk", u"", u"Danish"),
(u"de", u"Deutsch", u"", u"German"), (u"de", u"Deutsch", u"", u"German"),
@ -17,11 +19,15 @@ language_codes = (
(u"en-AU", u"English", u"Australia", u"English"), (u"en-AU", u"English", u"Australia", u"English"),
(u"en-CA", u"English", u"Canada", u"English"), (u"en-CA", u"English", u"Canada", u"English"),
(u"en-GB", u"English", u"United Kingdom", u"English"), (u"en-GB", u"English", u"United Kingdom", u"English"),
(u"en-IE", u"English", u"Ireland", u"English"),
(u"en-IN", u"English", u"India", u"English"), (u"en-IN", u"English", u"India", u"English"),
(u"en-MY", u"English", u"Malaysia", u"English"), (u"en-NZ", u"English", u"New Zealand", u"English"),
(u"en-PH", u"English", u"Philippines", u"English"),
(u"en-SG", u"English", u"Singapore", u"English"),
(u"en-US", u"English", u"United States", u"English"), (u"en-US", u"English", u"United States", u"English"),
(u"es", u"Español", u"", u"Spanish"), (u"es", u"Español", u"", u"Spanish"),
(u"es-AR", u"Español", u"Argentina", u"Spanish"), (u"es-AR", u"Español", u"Argentina", u"Spanish"),
(u"es-CL", u"Español", u"Chile", u"Spanish"),
(u"es-ES", u"Español", u"España", u"Spanish"), (u"es-ES", u"Español", u"España", u"Spanish"),
(u"es-MX", u"Español", u"México", u"Spanish"), (u"es-MX", u"Español", u"México", u"Spanish"),
(u"et-EE", u"Eesti", u"", u"Estonian"), (u"et-EE", u"Eesti", u"", u"Estonian"),
@ -35,6 +41,7 @@ language_codes = (
(u"he-IL", u"עברית", u"", u"Hebrew"), (u"he-IL", u"עברית", u"", u"Hebrew"),
(u"hr-HR", u"Hrvatski", u"", u"Croatian"), (u"hr-HR", u"Hrvatski", u"", u"Croatian"),
(u"hu-HU", u"Magyar", u"", u"Hungarian"), (u"hu-HU", u"Magyar", u"", u"Hungarian"),
(u"hy-AM", u"Հայերեն", u"", u"Armenian"),
(u"id-ID", u"Indonesia", u"", u"Indonesian"), (u"id-ID", u"Indonesia", u"", u"Indonesian"),
(u"is-IS", u"Íslenska", u"", u"Icelandic"), (u"is-IS", u"Íslenska", u"", u"Icelandic"),
(u"it-IT", u"Italiano", u"", u"Italian"), (u"it-IT", u"Italiano", u"", u"Italian"),
@ -42,7 +49,7 @@ language_codes = (
(u"ko-KR", u"한국어", u"", u"Korean"), (u"ko-KR", u"한국어", u"", u"Korean"),
(u"lt-LT", u"Lietuvių", u"", u"Lithuanian"), (u"lt-LT", u"Lietuvių", u"", u"Lithuanian"),
(u"lv-LV", u"Latviešu", u"", u"Latvian"), (u"lv-LV", u"Latviešu", u"", u"Latvian"),
(u"ms-MY", u"Bahasa Melayu", u"", u"Malay"), (u"ms-MY", u"Melayu", u"", u"Malay"),
(u"nb-NO", u"Norsk Bokmål", u"", u"Norwegian Bokmål"), (u"nb-NO", u"Norsk Bokmål", u"", u"Norwegian Bokmål"),
(u"nl", u"Nederlands", u"", u"Dutch"), (u"nl", u"Nederlands", u"", u"Dutch"),
(u"nl-BE", u"Nederlands", u"België", u"Dutch"), (u"nl-BE", u"Nederlands", u"België", u"Dutch"),
@ -55,8 +62,9 @@ language_codes = (
(u"ru-RU", u"Русский", u"", u"Russian"), (u"ru-RU", u"Русский", u"", u"Russian"),
(u"sk-SK", u"Slovenčina", u"", u"Slovak"), (u"sk-SK", u"Slovenčina", u"", u"Slovak"),
(u"sl-SI", u"Slovenščina", u"", u"Slovenian"), (u"sl-SI", u"Slovenščina", u"", u"Slovenian"),
(u"sr-RS", u"Српски", u"", u"Serbian"), (u"sr-RS", u"Srpski", u"", u"Serbian"),
(u"sv-SE", u"Svenska", u"", u"Swedish"), (u"sv-SE", u"Svenska", u"", u"Swedish"),
(u"sw-KE", u"Kiswahili", u"", u"Swahili"),
(u"th-TH", u"ไทย", u"", u"Thai"), (u"th-TH", u"ไทย", u"", u"Thai"),
(u"tr-TR", u"Türkçe", u"", u"Turkish"), (u"tr-TR", u"Türkçe", u"", u"Turkish"),
(u"uk-UA", u"Українська", u"", u"Ukrainian"), (u"uk-UA", u"Українська", u"", u"Ukrainian"),

View File

@ -1,12 +1,20 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Searx preferences implementation.
"""
# pylint: disable=useless-object-inheritance
from base64 import urlsafe_b64encode, urlsafe_b64decode from base64 import urlsafe_b64encode, urlsafe_b64decode
from zlib import compress, decompress from zlib import compress, decompress
from sys import version from sys import version
from searx import settings, autocomplete from searx import settings, autocomplete
from searx.languages import language_codes as languages from searx.languages import language_codes as languages
from searx.utils import match_language
from searx.url_utils import parse_qs, urlencode from searx.url_utils import parse_qs, urlencode
if version[0] == '3': if version[0] == '3':
# pylint: disable=invalid-name
unicode = str unicode = str
@ -19,11 +27,14 @@ DOI_RESOLVERS = list(settings['doi_resolvers'])
class MissingArgumentException(Exception): class MissingArgumentException(Exception):
pass """Exception raised by ``cls._post_init`` when an argument is missing.
"""
class ValidationException(Exception): class ValidationException(Exception):
pass
"""Exception raised by ``cls._post_init`` when a configuration value is invalid.
"""
class Setting(object): class Setting(object):
@ -41,33 +52,45 @@ class Setting(object):
pass pass
def parse(self, data): def parse(self, data):
"""Parse ``data`` and store the result at ``self.value``
If needed, it is overridden in subclasses.
"""
self.value = data self.value = data
def get_value(self): def get_value(self):
"""Returns the value of the setting
If needed, it is overridden in subclasses.
"""
return self.value return self.value
def save(self, name, resp): def save(self, name, resp):
"""Save cookie ``name`` in the HTTP response object.
If needed, it is overridden in subclasses."""
resp.set_cookie(name, self.value, max_age=COOKIE_MAX_AGE) resp.set_cookie(name, self.value, max_age=COOKIE_MAX_AGE)
class StringSetting(Setting): class StringSetting(Setting):
"""Setting of plain string values""" """Setting of plain string values"""
pass
class EnumStringSetting(Setting): class EnumStringSetting(Setting):
"""Setting of a value which can only come from the given choices""" """Setting of a value which can only come from the given choices"""
def _validate_selection(self, selection):
if selection not in self.choices:
raise ValidationException('Invalid value: "{0}"'.format(selection))
def _post_init(self): def _post_init(self):
if not hasattr(self, 'choices'): if not hasattr(self, 'choices'):
raise MissingArgumentException('Missing argument: choices') raise MissingArgumentException('Missing argument: choices')
self._validate_selection(self.value) self._validate_selection(self.value)
def _validate_selection(self, selection):
if selection not in self.choices: # pylint: disable=no-member
raise ValidationException('Invalid value: "{0}"'.format(selection))
def parse(self, data): def parse(self, data):
"""Parse and validate ``data`` and store the result at ``self.value``
"""
self._validate_selection(data) self._validate_selection(data)
self.value = data self.value = data
@ -77,7 +100,7 @@ class MultipleChoiceSetting(EnumStringSetting):
def _validate_selections(self, selections): def _validate_selections(self, selections):
for item in selections: for item in selections:
if item not in self.choices: if item not in self.choices: # pylint: disable=no-member
raise ValidationException('Invalid value: "{0}"'.format(selections)) raise ValidationException('Invalid value: "{0}"'.format(selections))
def _post_init(self): def _post_init(self):
@ -86,6 +109,8 @@ class MultipleChoiceSetting(EnumStringSetting):
self._validate_selections(self.value) self._validate_selections(self.value)
def parse(self, data): def parse(self, data):
"""Parse and validate ``data`` and store the result at ``self.value``
"""
if data == '': if data == '':
self.value = [] self.value = []
return return
@ -94,24 +119,67 @@ class MultipleChoiceSetting(EnumStringSetting):
self._validate_selections(elements) self._validate_selections(elements)
self.value = elements self.value = elements
def parse_form(self, data): def parse_form(self, data): # pylint: disable=missing-function-docstring
self.value = [] self.value = []
for choice in data: for choice in data:
if choice in self.choices and choice not in self.value: if choice in self.choices and choice not in self.value: # pylint: disable=no-member
self.value.append(choice) self.value.append(choice)
def save(self, name, resp): def save(self, name, resp):
"""Save cookie ``name`` in the HTTP response object.
"""
resp.set_cookie(name, ','.join(self.value), max_age=COOKIE_MAX_AGE) resp.set_cookie(name, ','.join(self.value), max_age=COOKIE_MAX_AGE)
class SetSetting(Setting):
"""Setting of values of type ``set`` (comma separated string) """
def _post_init(self):
if not hasattr(self, 'values'):
self.values = set()
def get_value(self):
"""Returns a string with comma separated values.
"""
return ','.join(self.values)
def parse(self, data):
"""Parse and validate ``data`` and store the result at ``self.value``
"""
if data == '':
self.values = set() # pylint: disable=attribute-defined-outside-init
return
elements = data.split(',')
for element in elements:
self.values.add(element)
def parse_form(self, data): # pylint: disable=missing-function-docstring
elements = data.split(',')
self.values = set(elements) # pylint: disable=attribute-defined-outside-init
def save(self, name, resp):
"""Save cookie ``name`` in the HTTP response object.
"""
resp.set_cookie(name, ','.join(self.values), max_age=COOKIE_MAX_AGE)
class SearchLanguageSetting(EnumStringSetting): class SearchLanguageSetting(EnumStringSetting):
"""Available choices may change, so user's value may not be in choices anymore""" """Available choices may change, so user's value may not be in choices anymore"""
def _validate_selection(self, selection):
if selection != "" and not match_language(
# pylint: disable=no-member
selection, self.choices, fallback=None):
raise ValidationException('Invalid language code: "{0}"'.format(selection))
def parse(self, data): def parse(self, data):
if data not in self.choices and data != self.value: """Parse and validate ``data`` and store the result at ``self.value``
"""
if data not in self.choices and data != self.value: # pylint: disable=no-member
# hack to give some backwards compatibility with old language cookies # hack to give some backwards compatibility with old language cookies
data = str(data).replace('_', '-') data = str(data).replace('_', '-')
lang = data.split('-')[0] lang = data.split('-')[0]
# pylint: disable=no-member
if data in self.choices: if data in self.choices:
pass pass
elif lang in self.choices: elif lang in self.choices:
@ -127,16 +195,21 @@ class MapSetting(Setting):
def _post_init(self): def _post_init(self):
if not hasattr(self, 'map'): if not hasattr(self, 'map'):
raise MissingArgumentException('missing argument: map') raise MissingArgumentException('missing argument: map')
if self.value not in self.map.values(): if self.value not in self.map.values(): # pylint: disable=no-member
raise ValidationException('Invalid default value') raise ValidationException('Invalid default value')
def parse(self, data): def parse(self, data):
"""Parse and validate ``data`` and store the result at ``self.value``
"""
# pylint: disable=no-member
if data not in self.map: if data not in self.map:
raise ValidationException('Invalid choice: {0}'.format(data)) raise ValidationException('Invalid choice: {0}'.format(data))
self.value = self.map[data] self.value = self.map[data]
self.key = data self.key = data # pylint: disable=attribute-defined-outside-init
def save(self, name, resp): def save(self, name, resp):
"""Save cookie ``name`` in the HTTP response object.
"""
if hasattr(self, 'key'): if hasattr(self, 'key'):
resp.set_cookie(name, self.key, max_age=COOKIE_MAX_AGE) resp.set_cookie(name, self.key, max_age=COOKIE_MAX_AGE)
@ -150,24 +223,26 @@ class SwitchableSetting(Setting):
if not hasattr(self, 'choices'): if not hasattr(self, 'choices'):
raise MissingArgumentException('missing argument: choices') raise MissingArgumentException('missing argument: choices')
def transform_form_items(self, items): def transform_form_items(self, items): # pylint: disable=missing-function-docstring
# pylint: disable=no-self-use
return items return items
def transform_values(self, values): def transform_values(self, values): # pylint: disable=missing-function-docstring
# pylint: disable=no-self-use
return values return values
def parse_cookie(self, data): def parse_cookie(self, data): # pylint: disable=missing-function-docstring
# pylint: disable=attribute-defined-outside-init
if data[DISABLED] != '': if data[DISABLED] != '':
self.disabled = set(data[DISABLED].split(',')) self.disabled = set(data[DISABLED].split(','))
if data[ENABLED] != '': if data[ENABLED] != '':
self.enabled = set(data[ENABLED].split(',')) self.enabled = set(data[ENABLED].split(','))
def parse_form(self, items): def parse_form(self, items): # pylint: disable=missing-function-docstring
items = self.transform_form_items(items) items = self.transform_form_items(items)
self.disabled = set() # pylint: disable=attribute-defined-outside-init
self.disabled = set() self.enabled = set() # pylint: disable=attribute-defined-outside-init
self.enabled = set() for choice in self.choices: # pylint: disable=no-member
for choice in self.choices:
if choice['default_on']: if choice['default_on']:
if choice['id'] in items: if choice['id'] in items:
self.disabled.add(choice['id']) self.disabled.add(choice['id'])
@ -175,31 +250,34 @@ class SwitchableSetting(Setting):
if choice['id'] not in items: if choice['id'] not in items:
self.enabled.add(choice['id']) self.enabled.add(choice['id'])
def save(self, resp): def save(self, resp): # pylint: disable=arguments-differ
"""Save cookie in the HTTP response object.
"""
resp.set_cookie('disabled_{0}'.format(self.value), ','.join(self.disabled), max_age=COOKIE_MAX_AGE) resp.set_cookie('disabled_{0}'.format(self.value), ','.join(self.disabled), max_age=COOKIE_MAX_AGE)
resp.set_cookie('enabled_{0}'.format(self.value), ','.join(self.enabled), max_age=COOKIE_MAX_AGE) resp.set_cookie('enabled_{0}'.format(self.value), ','.join(self.enabled), max_age=COOKIE_MAX_AGE)
def get_disabled(self): def get_disabled(self): # pylint: disable=missing-function-docstring
disabled = self.disabled disabled = self.disabled
for choice in self.choices: for choice in self.choices: # pylint: disable=no-member
if not choice['default_on'] and choice['id'] not in self.enabled: if not choice['default_on'] and choice['id'] not in self.enabled:
disabled.add(choice['id']) disabled.add(choice['id'])
return self.transform_values(disabled) return self.transform_values(disabled)
def get_enabled(self): def get_enabled(self): # pylint: disable=missing-function-docstring
enabled = self.enabled enabled = self.enabled
for choice in self.choices: for choice in self.choices: # pylint: disable=no-member
if choice['default_on'] and choice['id'] not in self.disabled: if choice['default_on'] and choice['id'] not in self.disabled:
enabled.add(choice['id']) enabled.add(choice['id'])
return self.transform_values(enabled) return self.transform_values(enabled)
class EnginesSetting(SwitchableSetting): class EnginesSetting(SwitchableSetting):
"""Engine settings"""
def _post_init(self): def _post_init(self):
super(EnginesSetting, self)._post_init() super(EnginesSetting, self)._post_init()
transformed_choices = [] transformed_choices = []
for engine_name, engine in self.choices.items(): for engine_name, engine in self.choices.items(): # pylint: disable=no-member,access-member-before-definition
for category in engine.categories: for category in engine.categories:
transformed_choice = dict() transformed_choice = dict()
transformed_choice['default_on'] = not engine.disabled transformed_choice['default_on'] = not engine.disabled
@ -221,11 +299,12 @@ class EnginesSetting(SwitchableSetting):
class PluginsSetting(SwitchableSetting): class PluginsSetting(SwitchableSetting):
"""Plugin settings"""
def _post_init(self): def _post_init(self):
super(PluginsSetting, self)._post_init() super(PluginsSetting, self)._post_init()
transformed_choices = [] transformed_choices = []
for plugin in self.choices: for plugin in self.choices: # pylint: disable=access-member-before-definition
transformed_choice = dict() transformed_choice = dict()
transformed_choice['default_on'] = plugin.default_on transformed_choice['default_on'] = plugin.default_on
transformed_choice['id'] = plugin.id transformed_choice['id'] = plugin.id
@ -242,29 +321,60 @@ class Preferences(object):
def __init__(self, themes, categories, engines, plugins): def __init__(self, themes, categories, engines, plugins):
super(Preferences, self).__init__() super(Preferences, self).__init__()
self.key_value_settings = {'categories': MultipleChoiceSetting(['general'], choices=categories + ['none']), self.key_value_settings = {
'language': SearchLanguageSetting(settings['search']['language'], 'categories': MultipleChoiceSetting(
choices=LANGUAGE_CODES), ['general'], choices=categories + ['none']
'locale': EnumStringSetting(settings['ui']['default_locale'], ),
choices=list(settings['locales'].keys()) + ['']), 'language': SearchLanguageSetting(
'autocomplete': EnumStringSetting(settings['search']['autocomplete'], settings['search'].get('default_lang', ''),
choices=list(autocomplete.backends.keys()) + ['']), choices=list(LANGUAGE_CODES) + ['']
'image_proxy': MapSetting(settings['server']['image_proxy'], ),
map={'': settings['server']['image_proxy'], 'locale': EnumStringSetting(
settings['ui'].get('default_locale', ''),
choices=list(settings['locales'].keys()) + ['']
),
'autocomplete': EnumStringSetting(
settings['search'].get('autocomplete', ''),
choices=list(autocomplete.backends.keys()) + ['']
),
'image_proxy': MapSetting(
settings['server'].get('image_proxy', False),
map={
'': settings['server'].get('image_proxy', 0),
'0': False, '0': False,
'1': True, '1': True,
'True': True, 'True': True,
'False': False}), 'False': False
'method': EnumStringSetting('POST', choices=('GET', 'POST')), }
'safesearch': MapSetting(settings['search']['safe_search'], map={'0': 0, ),
'method': EnumStringSetting(
'POST',
choices=('GET', 'POST')
),
'safesearch': MapSetting(
settings['search'].get('safe_search', 0),
map={
'0': 0,
'1': 1, '1': 1,
'2': 2}), '2': 2
'theme': EnumStringSetting(settings['ui']['default_theme'], choices=themes), }
'results_on_new_tab': MapSetting(False, map={'0': False, ),
'theme': EnumStringSetting(
settings['ui'].get('default_theme', 'oscar'),
choices=themes
),
'results_on_new_tab': MapSetting(
False,
map={
'0': False,
'1': True, '1': True,
'False': False, 'False': False,
'True': True}), 'True': True
'doi_resolver': MultipleChoiceSetting(['oadoi.org'], choices=DOI_RESOLVERS), }
),
'doi_resolver': MultipleChoiceSetting(
['oadoi.org'], choices=DOI_RESOLVERS
),
'oscar-style': EnumStringSetting( 'oscar-style': EnumStringSetting(
settings['ui'].get('theme_args', {}).get('oscar_style', 'logicodev'), settings['ui'].get('theme_args', {}).get('oscar_style', 'logicodev'),
choices=['', 'logicodev', 'logicodev-dark', 'pointhi']), choices=['', 'logicodev', 'logicodev-dark', 'pointhi']),
@ -272,9 +382,11 @@ class Preferences(object):
self.engines = EnginesSetting('engines', choices=engines) self.engines = EnginesSetting('engines', choices=engines)
self.plugins = PluginsSetting('plugins', choices=plugins) self.plugins = PluginsSetting('plugins', choices=plugins)
self.tokens = SetSetting('tokens')
self.unknown_params = {} self.unknown_params = {}
def get_as_url_params(self): def get_as_url_params(self):
"""Return preferences as URL parameters"""
settings_kv = {} settings_kv = {}
for k, v in self.key_value_settings.items(): for k, v in self.key_value_settings.items():
if isinstance(v, MultipleChoiceSetting): if isinstance(v, MultipleChoiceSetting):
@ -288,9 +400,12 @@ class Preferences(object):
settings_kv['disabled_plugins'] = ','.join(self.plugins.disabled) settings_kv['disabled_plugins'] = ','.join(self.plugins.disabled)
settings_kv['enabled_plugins'] = ','.join(self.plugins.enabled) settings_kv['enabled_plugins'] = ','.join(self.plugins.enabled)
settings_kv['tokens'] = ','.join(self.tokens.values)
return urlsafe_b64encode(compress(urlencode(settings_kv).encode('utf-8'))).decode('utf-8') return urlsafe_b64encode(compress(urlencode(settings_kv).encode('utf-8'))).decode('utf-8')
def parse_encoded_data(self, input_data): def parse_encoded_data(self, input_data):
"""parse (base64) preferences from request (``flask.request.form['preferences']``)"""
decoded_data = decompress(urlsafe_b64decode(input_data.encode('utf-8'))) decoded_data = decompress(urlsafe_b64decode(input_data.encode('utf-8')))
dict_data = {} dict_data = {}
for x, y in parse_qs(decoded_data).items(): for x, y in parse_qs(decoded_data).items():
@ -298,6 +413,7 @@ class Preferences(object):
self.parse_dict(dict_data) self.parse_dict(dict_data)
def parse_dict(self, input_data): def parse_dict(self, input_data):
"""parse preferences from request (``flask.request.form``)"""
for user_setting_name, user_setting in input_data.items(): for user_setting_name, user_setting in input_data.items():
if user_setting_name in self.key_value_settings: if user_setting_name in self.key_value_settings:
self.key_value_settings[user_setting_name].parse(user_setting) self.key_value_settings[user_setting_name].parse(user_setting)
@ -307,6 +423,8 @@ class Preferences(object):
elif user_setting_name == 'disabled_plugins': elif user_setting_name == 'disabled_plugins':
self.plugins.parse_cookie((input_data.get('disabled_plugins', ''), self.plugins.parse_cookie((input_data.get('disabled_plugins', ''),
input_data.get('enabled_plugins', ''))) input_data.get('enabled_plugins', '')))
elif user_setting_name == 'tokens':
self.tokens.parse(user_setting)
elif not any(user_setting_name.startswith(x) for x in [ elif not any(user_setting_name.startswith(x) for x in [
'enabled_', 'enabled_',
'disabled_', 'disabled_',
@ -316,6 +434,7 @@ class Preferences(object):
self.unknown_params[user_setting_name] = user_setting self.unknown_params[user_setting_name] = user_setting
def parse_form(self, input_data): def parse_form(self, input_data):
"""Parse form (``<input>``) data from a ``flask.request.form``"""
disabled_engines = [] disabled_engines = []
enabled_categories = [] enabled_categories = []
disabled_plugins = [] disabled_plugins = []
@ -328,6 +447,8 @@ class Preferences(object):
enabled_categories.append(user_setting_name[len('category_'):]) enabled_categories.append(user_setting_name[len('category_'):])
elif user_setting_name.startswith('plugin_'): elif user_setting_name.startswith('plugin_'):
disabled_plugins.append(user_setting_name) disabled_plugins.append(user_setting_name)
elif user_setting_name == 'tokens':
self.tokens.parse_form(user_setting)
else: else:
self.unknown_params[user_setting_name] = user_setting self.unknown_params[user_setting_name] = user_setting
self.key_value_settings['categories'].parse_form(enabled_categories) self.key_value_settings['categories'].parse_form(enabled_categories)
@ -336,16 +457,34 @@ class Preferences(object):
# cannot be used in case of engines or plugins # cannot be used in case of engines or plugins
def get_value(self, user_setting_name): def get_value(self, user_setting_name):
"""Returns the value for ``user_setting_name``
"""
ret_val = None
if user_setting_name in self.key_value_settings: if user_setting_name in self.key_value_settings:
return self.key_value_settings[user_setting_name].get_value() ret_val = self.key_value_settings[user_setting_name].get_value()
if user_setting_name in self.unknown_params: if user_setting_name in self.unknown_params:
return self.unknown_params[user_setting_name] ret_val = self.unknown_params[user_setting_name]
return ret_val
def save(self, resp): def save(self, resp):
"""Save cookie in the HTTP response object.
"""
for user_setting_name, user_setting in self.key_value_settings.items(): for user_setting_name, user_setting in self.key_value_settings.items():
user_setting.save(user_setting_name, resp) user_setting.save(user_setting_name, resp)
self.engines.save(resp) self.engines.save(resp)
self.plugins.save(resp) self.plugins.save(resp)
self.tokens.save('tokens', resp)
for k, v in self.unknown_params.items(): for k, v in self.unknown_params.items():
resp.set_cookie(k, v, max_age=COOKIE_MAX_AGE) resp.set_cookie(k, v, max_age=COOKIE_MAX_AGE)
return resp return resp
def validate_token(self, engine): # pylint: disable=missing-function-docstring
valid = True
if hasattr(engine, 'tokens') and engine.tokens:
valid = False
for token in self.tokens.values:
if token in engine.tokens:
valid = True
break
return valid
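Together with the new `SetSetting`-backed `tokens` preference, `validate_token` implements private engines: an engine that lists `tokens` in settings.yml is only queried when the user's comma-separated tokens preference contains a matching value. A minimal sketch of that rule, with throwaway stand-ins for the engine module and the parsed preference:

class FakeEngine(object):
    """Stand-in for an engine module configured in settings.yml."""
    tokens = ['s3cr3t']              # hypothetical token list

def validate_token(engine, user_tokens):
    # engines without tokens stay public
    if not getattr(engine, 'tokens', None):
        return True
    # private engines need at least one matching user token
    return any(token in engine.tokens for token in user_tokens)

print(validate_token(FakeEngine(), set()))        # False: engine is skipped
print(validate_token(FakeEngine(), {'s3cr3t'}))   # True: engine is queried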

View File

@ -177,7 +177,8 @@ class RawTextQuery(object):
class SearchQuery(object): class SearchQuery(object):
"""container for all the search parameters (query, language, etc...)""" """container for all the search parameters (query, language, etc...)"""
def __init__(self, query, engines, categories, lang, safesearch, pageno, time_range, timeout_limit=None): def __init__(self, query, engines, categories, lang, safesearch, pageno, time_range,
timeout_limit=None, preferences=None):
self.query = query.encode('utf-8') self.query = query.encode('utf-8')
self.engines = engines self.engines = engines
self.categories = categories self.categories = categories
@ -186,6 +187,7 @@ class SearchQuery(object):
self.pageno = pageno self.pageno = pageno
self.time_range = None if time_range in ('', 'None', None) else time_range self.time_range = None if time_range in ('', 'None', None) else time_range
self.timeout_limit = timeout_limit self.timeout_limit = timeout_limit
self.preferences = preferences
def __str__(self): def __str__(self):
return str(self.query) + ";" + str(self.engines) return str(self.query) + ";" + str(self.engines)

View File

@ -345,8 +345,8 @@ class ResultContainer(object):
return 0 return 0
return resultnum_sum / len(self._number_of_results) return resultnum_sum / len(self._number_of_results)
def add_unresponsive_engine(self, engine_error): def add_unresponsive_engine(self, engine_name, error_type, error_message=None):
self.unresponsive_engines.add(engine_error) self.unresponsive_engines.add((engine_name, error_type, error_message))
def add_timing(self, engine_name, engine_time, page_load_time): def add_timing(self, engine_name, engine_time, page_load_time):
self.timings.append({ self.timings.append({
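`add_unresponsive_engine` now takes the engine name, an error type and an optional message as separate arguments instead of a single pre-formatted (and already translated) tuple, so translation can happen later in the template layer. A tiny sketch of the container side of that change:

class ResultContainerSketch(object):
    """Reduced stand-in for searx's ResultContainer."""
    def __init__(self):
        self.unresponsive_engines = set()

    def add_unresponsive_engine(self, engine_name, error_type, error_message=None):
        # structured data; formatting and i18n happen when rendering
        self.unresponsive_engines.add((engine_name, error_type, error_message))

rc = ResultContainerSketch()
rc.add_unresponsive_engine('startpage', 'timeout')
rc.add_unresponsive_engine('bing', 'unexpected crash', 'boom')
print(sorted(rc.unresponsive_engines))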

View File

@ -127,11 +127,7 @@ def search_one_offline_request_safe(engine_name, query, request_params, result_c
logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e)) logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
except Exception as e: except Exception as e:
record_offline_engine_stats_on_error(engine, result_container, start_time) record_offline_engine_stats_on_error(engine, result_container, start_time)
result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
result_container.add_unresponsive_engine((
engine_name,
u'{0}: {1}'.format(gettext('unexpected crash'), e),
))
logger.exception('engine {0} : exception : {1}'.format(engine_name, e)) logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
@ -186,24 +182,21 @@ def search_one_http_request_safe(engine_name, query, request_params, result_cont
engine.stats['errors'] += 1 engine.stats['errors'] += 1
if (issubclass(e.__class__, requests.exceptions.Timeout)): if (issubclass(e.__class__, requests.exceptions.Timeout)):
result_container.add_unresponsive_engine((engine_name, gettext('timeout'))) result_container.add_unresponsive_engine(engine_name, 'timeout')
# requests timeout (connect or read) # requests timeout (connect or read)
logger.error("engine {0} : HTTP requests timeout" logger.error("engine {0} : HTTP requests timeout"
"(search duration : {1} s, timeout: {2} s) : {3}" "(search duration : {1} s, timeout: {2} s) : {3}"
.format(engine_name, engine_time, timeout_limit, e.__class__.__name__)) .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
requests_exception = True requests_exception = True
elif (issubclass(e.__class__, requests.exceptions.RequestException)): elif (issubclass(e.__class__, requests.exceptions.RequestException)):
result_container.add_unresponsive_engine((engine_name, gettext('request exception'))) result_container.add_unresponsive_engine(engine_name, 'request exception')
# other requests exception # other requests exception
logger.exception("engine {0} : requests exception" logger.exception("engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}" "(search duration : {1} s, timeout: {2} s) : {3}"
.format(engine_name, engine_time, timeout_limit, e)) .format(engine_name, engine_time, timeout_limit, e))
requests_exception = True requests_exception = True
else: else:
result_container.add_unresponsive_engine(( result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
engine_name,
u'{0}: {1}'.format(gettext('unexpected crash'), e),
))
# others errors # others errors
logger.exception('engine {0} : exception : {1}'.format(engine_name, e)) logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
@ -238,7 +231,7 @@ def search_multiple_requests(requests, result_container, start_time, timeout_lim
remaining_time = max(0.0, timeout_limit - (time() - start_time)) remaining_time = max(0.0, timeout_limit - (time() - start_time))
th.join(remaining_time) th.join(remaining_time)
if th.isAlive(): if th.isAlive():
result_container.add_unresponsive_engine((th._engine_name, gettext('timeout'))) result_container.add_unresponsive_engine(th._engine_name, 'timeout')
logger.warning('engine timeout: {0}'.format(th._engine_name)) logger.warning('engine timeout: {0}'.format(th._engine_name))
@ -407,7 +400,7 @@ def get_search_query_from_webapp(preferences, form):
return (SearchQuery(query, query_engines, query_categories, return (SearchQuery(query, query_engines, query_categories,
query_lang, query_safesearch, query_pageno, query_lang, query_safesearch, query_pageno,
query_time_range, query_timeout), query_time_range, query_timeout, preferences),
raw_text_query) raw_text_query)
@ -459,6 +452,9 @@ class Search(object):
engine = engines[selected_engine['name']] engine = engines[selected_engine['name']]
if not search_query.preferences.validate_token(engine):
continue
# skip suspended engines # skip suspended engines
if engine.suspend_end_time >= time(): if engine.suspend_end_time >= time():
logger.debug('Engine currently suspended: %s', selected_engine['name']) logger.debug('Engine currently suspended: %s', selected_engine['name'])

View File

@ -4,8 +4,8 @@ general:
search: search:
safe_search : 0 # Filter results. 0: None, 1: Moderate, 2: Strict safe_search : 0 # Filter results. 0: None, 1: Moderate, 2: Strict
autocomplete : "" # Existing autocomplete backends: "dbpedia", "duckduckgo", "google", "startpage", "wikipedia" - leave blank to turn it off by default autocomplete : "" # Existing autocomplete backends: "dbpedia", "duckduckgo", "google", "startpage", "swisscows", "qwant", "wikipedia" - leave blank to turn it off by default
language : "en-US" default_lang : "" # Default search language - leave blank to detect from browser information or use codes from 'languages.py'
ban_time_on_fail : 5 # ban time in seconds after engine errors ban_time_on_fail : 5 # ban time in seconds after engine errors
max_ban_time_on_fail : 120 # max ban time in seconds after engine errors max_ban_time_on_fail : 120 # max ban time in seconds after engine errors
@ -79,9 +79,10 @@ engines:
categories : science categories : science
timeout : 4.0 timeout : 4.0
- name : base # tmp suspended: dh key too small
engine : base # - name : base
shortcut : bs # engine : base
# shortcut : bs
- name : wikipedia - name : wikipedia
engine : wikipedia engine : wikipedia
@ -202,6 +203,11 @@ engines:
timeout: 3.0 timeout: 3.0
disabled : True disabled : True
- name : etools
engine : etools
shortcut : eto
disabled : True
- name : etymonline - name : etymonline
engine : xpath engine : xpath
paging : True paging : True
@ -213,11 +219,6 @@ engines:
shortcut : et shortcut : et
disabled : True disabled : True
- name : faroo
engine : faroo
shortcut : fa
disabled : True
- name : 1x - name : 1x
engine : www1x engine : www1x
shortcut : 1x shortcut : 1x
@ -408,7 +409,7 @@ engines:
- name : library genesis - name : library genesis
engine : xpath engine : xpath
search_url : http://libgen.io/search.php?req={query} search_url : https://libgen.is/search.php?req={query}
url_xpath : //a[contains(@href,"bookfi.net")]/@href url_xpath : //a[contains(@href,"bookfi.net")]/@href
title_xpath : //a[contains(@href,"book/")]/text()[1] title_xpath : //a[contains(@href,"book/")]/text()[1]
content_xpath : //td/a[1][contains(@href,"=author")]/text() content_xpath : //td/a[1][contains(@href,"=author")]/text()
@ -464,7 +465,7 @@ engines:
- name : openairedatasets - name : openairedatasets
engine : json_engine engine : json_engine
paging : True paging : True
search_url : http://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query} search_url : https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
results_query : response/results/result results_query : response/results/result
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
title_query : metadata/oaf:entity/oaf:result/title/$ title_query : metadata/oaf:entity/oaf:result/title/$
@ -476,7 +477,7 @@ engines:
- name : openairepublications - name : openairepublications
engine : json_engine engine : json_engine
paging : True paging : True
search_url : http://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query} search_url : https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
results_query : response/results/result results_query : response/results/result
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
title_query : metadata/oaf:entity/oaf:result/title/$ title_query : metadata/oaf:entity/oaf:result/title/$
@ -552,10 +553,11 @@ engines:
timeout : 10.0 timeout : 10.0
disabled : True disabled : True
- name : scanr structures # tmp suspended: bad certificate
shortcut: scs # - name : scanr structures
engine : scanr_structures # shortcut: scs
disabled : True # engine : scanr_structures
# disabled : True
- name : soundcloud - name : soundcloud
engine : soundcloud engine : soundcloud
@ -598,9 +600,12 @@ engines:
shortcut : se shortcut : se
categories : science categories : science
- name : spotify # Spotify needs API credentials
engine : spotify # - name : spotify
shortcut : stf # engine : spotify
# shortcut : stf
# api_client_id : *******
# api_client_secret : *******
- name : startpage - name : startpage
engine : startpage engine : startpage
@ -676,6 +681,69 @@ engines:
engine : vimeo engine : vimeo
shortcut : vm shortcut : vm
- name : wikibooks
engine : mediawiki
shortcut : wb
categories : general
base_url : "https://{language}.wikibooks.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wikinews
engine : mediawiki
shortcut : wn
categories : news
base_url : "https://{language}.wikinews.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wikiquote
engine : mediawiki
shortcut : wq
categories : general
base_url : "https://{language}.wikiquote.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wikisource
engine : mediawiki
shortcut : ws
categories : general
base_url : "https://{language}.wikisource.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wiktionary
engine : mediawiki
shortcut : wt
categories : general
base_url : "https://{language}.wiktionary.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wikiversity
engine : mediawiki
shortcut : wv
categories : general
base_url : "https://{language}.wikiversity.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wikivoyage
engine : mediawiki
shortcut : wy
categories : general
base_url : "https://{language}.wikivoyage.org/"
number_of_results : 5
search_type : text
disabled : True
- name : wolframalpha - name : wolframalpha
shortcut : wa shortcut : wa
# You can use the engine using the official stable API, but you need an API key # You can use the engine using the official stable API, but you need an API key
@ -753,6 +821,20 @@ engines:
engine : seedpeer engine : seedpeer
categories: files, music, videos categories: files, music, videos
- name : rubygems
shortcut: rbg
engine: xpath
paging : True
search_url : https://rubygems.org/search?page={pageno}&query={query}
results_xpath: /html/body/main/div/a[@class="gems__gem"]
url_xpath : ./@href
title_xpath : ./span/h2
content_xpath : ./span/p
suggestion_xpath : /html/body/main/div/div[@class="search__suggestions"]/p/a
first_page_num : 1
categories: it
disabled : True
# - name : yacy # - name : yacy
# engine : yacy # engine : yacy
# shortcut : ya # shortcut : ya
@ -771,6 +853,7 @@ locales:
en : English en : English
ar : العَرَبِيَّة (Arabic) ar : العَرَبِيَّة (Arabic)
bg : Български (Bulgarian) bg : Български (Bulgarian)
bo : བོད་སྐད་ (Tibetan)
ca : Català (Catalan) ca : Català (Catalan)
cs : Čeština (Czech) cs : Čeština (Czech)
cy : Cymraeg (Welsh) cy : Cymraeg (Welsh)
@ -779,6 +862,7 @@ locales:
el_GR : Ελληνικά (Greek_Greece) el_GR : Ελληνικά (Greek_Greece)
eo : Esperanto (Esperanto) eo : Esperanto (Esperanto)
es : Español (Spanish) es : Español (Spanish)
et : Eesti (Estonian)
eu : Euskara (Basque) eu : Euskara (Basque)
fa_IR : (fārsī) فارسى (Persian) fa_IR : (fārsī) فارسى (Persian)
fi : Suomi (Finnish) fi : Suomi (Finnish)
@ -791,8 +875,10 @@ locales:
ia : Interlingua (Interlingua) ia : Interlingua (Interlingua)
it : Italiano (Italian) it : Italiano (Italian)
ja : 日本語 (Japanese) ja : 日本語 (Japanese)
lt : Lietuvių (Lithuanian)
nl : Nederlands (Dutch) nl : Nederlands (Dutch)
nl_BE : Vlaams (Dutch_Belgium) nl_BE : Vlaams (Dutch_Belgium)
oc : Lenga D'òc (Occitan)
pl : Polski (Polish) pl : Polski (Polish)
pt : Português (Portuguese) pt : Português (Portuguese)
pt_BR : Português (Portuguese_Brazil) pt_BR : Português (Portuguese_Brazil)
@ -803,6 +889,7 @@ locales:
sr : српски (Serbian) sr : српски (Serbian)
sv : Svenska (Swedish) sv : Svenska (Swedish)
te : తెలుగు (telugu) te : తెలుగు (telugu)
ta : தமிழ் (Tamil)
tr : Türkçe (Turkish) tr : Türkçe (Turkish)
uk : українська мова (Ukrainian) uk : українська мова (Ukrainian)
vi : tiếng việt (Vietnamese) vi : tiếng việt (Vietnamese)
@ -812,7 +899,7 @@ locales:
doi_resolvers : doi_resolvers :
oadoi.org : 'https://oadoi.org/' oadoi.org : 'https://oadoi.org/'
doi.org : 'https://doi.org/' doi.org : 'https://doi.org/'
doai.io : 'http://doai.io/' doai.io : 'https://doai.io/'
sci-hub.tw : 'http://sci-hub.tw/' sci-hub.tw : 'https://sci-hub.tw/'
default_doi_resolver : 'oadoi.org' default_doi_resolver : 'oadoi.org'

View File

@ -3,8 +3,6 @@ general:
instance_name : "searx_test" instance_name : "searx_test"
search: search:
safe_search : 0
autocomplete : ""
language: "all" language: "all"
server: server:
@ -12,14 +10,12 @@ server:
bind_address : 127.0.0.1 bind_address : 127.0.0.1
secret_key : "ultrasecretkey" # change this! secret_key : "ultrasecretkey" # change this!
base_url : False base_url : False
image_proxy : False
http_protocol_version : "1.0" http_protocol_version : "1.0"
ui: ui:
static_path : "" static_path : ""
templates_path : "" templates_path : ""
default_theme : oscar default_theme : oscar
default_locale : ""
outgoing: outgoing:
request_timeout : 1.0 # seconds request_timeout : 1.0 # seconds
@ -43,7 +39,7 @@ locales:
doi_resolvers : doi_resolvers :
oadoi.org : 'https://oadoi.org/' oadoi.org : 'https://oadoi.org/'
doi.org : 'https://doi.org/' doi.org : 'https://doi.org/'
doai.io : 'http://doai.io/' doai.io : 'https://doai.io/'
sci-hub.tw : 'http://sci-hub.tw/' sci-hub.tw : 'https://sci-hub.tw/'
default_doi_resolver : 'oadoi.org' default_doi_resolver : 'oadoi.org'

View File

@ -1,18 +1,40 @@
$(document).ready(function() { function hasScrollbar() {
var win = $(window); var root = document.compatMode=='BackCompat'? document.body : document.documentElement;
win.scroll(function() { return root.scrollHeight>root.clientHeight;
if ($(document).height() - win.height() - win.scrollTop() < 150) { }
function loadNextPage() {
var formData = $('#pagination form:last').serialize(); var formData = $('#pagination form:last').serialize();
if (formData) { if (formData) {
$('#pagination').html('<div class="loading-spinner"></div>'); $('#pagination').html('<div class="loading-spinner"></div>');
$.post('./', formData, function (data) { $.ajax({
type: "POST",
url: './',
data: formData,
dataType: 'html',
success: function(data) {
var body = $(data); var body = $(data);
$('#pagination').remove(); $('#pagination').remove();
$('#main_results').append('<hr/>'); $('#main_results').append('<hr/>');
$('#main_results').append(body.find('.result')); $('#main_results').append(body.find('.result'));
$('#main_results').append(body.find('#pagination')); $('#main_results').append(body.find('#pagination'));
}); if(!hasScrollbar()) {
loadNextPage();
} }
} }
}); });
}
}
$(document).ready(function() {
var win = $(window);
if(!hasScrollbar()) {
loadNextPage();
}
win.scroll(function() {
$("#pagination button").css("visibility", "hidden");
if ($(document).height() - win.height() - win.scrollTop() < 150) {
loadNextPage();
}
});
}); });

View File

@ -125,6 +125,14 @@ $(document).ready(function() {
} }
}); });
function nextResult(current, direction) {
var next = current[direction]();
while (!next.is('.result') && next.length !== 0) {
next = next[direction]();
}
return next
}
function highlightResult(which) { function highlightResult(which) {
return function() { return function() {
var current = $('.result[data-vim-selected]'); var current = $('.result[data-vim-selected]');
@ -157,13 +165,13 @@ $(document).ready(function() {
} }
break; break;
case 'down': case 'down':
next = current.next('.result'); next = nextResult(current, 'next');
if (next.length === 0) { if (next.length === 0) {
next = $('.result:first'); next = $('.result:first');
} }
break; break;
case 'up': case 'up':
next = current.prev('.result'); next = nextResult(current, 'prev');
if (next.length === 0) { if (next.length === 0) {
next = $('.result:last'); next = $('.result:last');
} }

View File

@ -1 +1 @@
node_modules/ /node_modules

View File

@ -13,7 +13,7 @@ module.exports = function(grunt) {
}, },
uglify: { uglify: {
options: { options: {
banner: '/*! oscar/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n' banner: '/*! oscar/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
}, },
dist: { dist: {
files: { files: {
@ -38,7 +38,6 @@ module.exports = function(grunt) {
development: { development: {
options: { options: {
paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"] paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"]
//banner: '/*! less/oscar/oscar.css | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n'
}, },
files: {"css/pointhi.css": "less/pointhi/oscar.less", files: {"css/pointhi.css": "less/pointhi/oscar.less",
"css/logicodev.css": "less/logicodev-dark/oscar.less", "css/logicodev.css": "less/logicodev-dark/oscar.less",
@ -47,7 +46,6 @@ module.exports = function(grunt) {
production: { production: {
options: { options: {
paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"], paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"],
//banner: '/*! less/oscar/oscar.css | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n',
cleancss: true cleancss: true
}, },
files: {"css/pointhi.min.css": "less/pointhi/oscar.less", files: {"css/pointhi.min.css": "less/pointhi/oscar.less",

View File

@ -86,6 +86,9 @@ $(document).ready(function(){
}, },
source: searx.searchResults.ttAdapter() source: searx.searchResults.ttAdapter()
}); });
$('#q').bind('typeahead:selected', function(ev, suggestion) {
$("#search_form").submit();
});
} }
}); });
;/** ;/**

View File

@ -1,2 +1,2 @@
/*! oscar/searx.min.js | 06-08-2019 | https://github.com/asciimoo/searx */ /*! oscar/searx.min.js | 23-03-2020 | https://github.com/asciimoo/searx */
requirejs.config({baseUrl:"./static/themes/oscar/js",paths:{app:"../app"}}),window.searx=function(a){"use strict";var b=a.currentScript||function(){var b=a.getElementsByTagName("script");return b[b.length-1]}();return{autocompleter:"true"===b.getAttribute("data-autocompleter"),method:b.getAttribute("data-method")}}(document),searx.autocompleter&&(searx.searchResults=new Bloodhound({datumTokenizer:Bloodhound.tokenizers.obj.whitespace("value"),queryTokenizer:Bloodhound.tokenizers.whitespace,remote:"./autocompleter?q=%QUERY"}),searx.searchResults.initialize()),$(document).ready(function(){searx.autocompleter&&$("#q").typeahead(null,{name:"search-results",displayKey:function(a){return a},source:searx.searchResults.ttAdapter()})}),$(document).ready(function(){$("#q.autofocus").focus(),$(".select-all-on-click").click(function(){$(this).select()}),$(".btn-collapse").click(function(){var a=$(this).data("btn-text-collapsed"),b=$(this).data("btn-text-not-collapsed");""!==a&&""!==b&&($(this).hasClass("collapsed")?new_html=$(this).html().replace(a,b):new_html=$(this).html().replace(b,a),$(this).html(new_html))}),$(".btn-toggle .btn").click(function(){var a="btn-"+$(this).data("btn-class"),b=$(this).data("btn-label-default"),c=$(this).data("btn-label-toggled");""!==c&&($(this).hasClass("btn-default")?new_html=$(this).html().replace(b,c):new_html=$(this).html().replace(c,b),$(this).html(new_html)),$(this).toggleClass(a),$(this).toggleClass("btn-default")}),$(".media-loader").click(function(){var a=$(this).data("target"),b=$(a+" > iframe"),c=b.attr("src");void 0!==c&&!1!==c||b.attr("src",b.data("src"))}),$(".btn-sm").dblclick(function(){var a="btn-"+$(this).data("btn-class");$(this).hasClass("btn-default")?($(".btn-sm > input").attr("checked","checked"),$(".btn-sm > input").prop("checked",!0),$(".btn-sm").addClass(a),$(".btn-sm").addClass("active"),$(".btn-sm").removeClass("btn-default")):($(".btn-sm > input").attr("checked",""),$(".btn-sm > input").removeAttr("checked"),$(".btn-sm > input").checked=!1,$(".btn-sm").removeClass(a),$(".btn-sm").removeClass("active"),$(".btn-sm").addClass("btn-default"))})}),$(document).ready(function(){$(".searx_overpass_request").on("click",function(a){var b="https://overpass-api.de/api/interpreter?data=",c=b+"[out:json][timeout:25];(",d=");out meta;",e=$(this).data("osm-id"),f=$(this).data("osm-type"),g=$(this).data("result-table"),h="#"+$(this).data("result-table-loadicon"),i=["addr:city","addr:country","addr:housenumber","addr:postcode","addr:street"];if(e&&f&&g){g="#"+g;var j=null;switch(f){case"node":j=c+"node("+e+");"+d;break;case"way":j=c+"way("+e+");"+d;break;case"relation":j=c+"relation("+e+");"+d}if(j){$.ajax(j).done(function(a){if(a&&a.elements&&a.elements[0]){var b=a.elements[0],c=$(g).html();for(var d in b.tags)if(null===b.tags.name||-1==i.indexOf(d)){switch(c+="<tr><td>"+d+"</td><td>",d){case"phone":case"fax":c+='<a href="tel:'+b.tags[d].replace(/ /g,"")+'">'+b.tags[d]+"</a>";break;case"email":c+='<a href="mailto:'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"website":case"url":c+='<a href="'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikidata":c+='<a href="https://www.wikidata.org/wiki/'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikipedia":if(-1!=b.tags[d].indexOf(":")){c+='<a 
href="https://'+b.tags[d].substring(0,b.tags[d].indexOf(":"))+".wikipedia.org/wiki/"+b.tags[d].substring(b.tags[d].indexOf(":")+1)+'">'+b.tags[d]+"</a>";break}default:c+=b.tags[d]}c+="</td></tr>"}$(g).html(c),$(g).removeClass("hidden"),$(h).addClass("hidden")}}).fail(function(){$(h).html($(h).html()+'<p class="text-muted">could not load data!</p>')})}}$(this).off(a)}),$(".searx_init_map").on("click",function(a){var b=$(this).data("leaflet-target"),c=$(this).data("map-lon"),d=$(this).data("map-lat"),e=$(this).data("map-zoom"),f=$(this).data("map-boundingbox"),g=$(this).data("map-geojson");require(["leaflet-0.7.3.min"],function(a){f&&(southWest=L.latLng(f[0],f[2]),northEast=L.latLng(f[1],f[3]),map_bounds=L.latLngBounds(southWest,northEast)),L.Icon.Default.imagePath="./static/themes/oscar/img/map";var h=L.map(b),i="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",j='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors',k=new L.TileLayer(i,{minZoom:1,maxZoom:19,attribution:j}),l="https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png",m='Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';new L.TileLayer(l,{minZoom:1,maxZoom:19,attribution:m});map_bounds?setTimeout(function(){h.fitBounds(map_bounds,{maxZoom:17})},0):c&&d&&(e?h.setView(new L.LatLng(d,c),e):h.setView(new L.LatLng(d,c),8)),h.addLayer(k);var n={"OSM Mapnik":k};L.control.layers(n).addTo(h),g&&L.geoJson(g).addTo(h)}),$(this).off(a)})}); requirejs.config({baseUrl:"./static/themes/oscar/js",paths:{app:"../app"}}),window.searx=function(a){"use strict";var b=a.currentScript||function(){var b=a.getElementsByTagName("script");return b[b.length-1]}();return{autocompleter:"true"===b.getAttribute("data-autocompleter"),method:b.getAttribute("data-method")}}(document),searx.autocompleter&&(searx.searchResults=new Bloodhound({datumTokenizer:Bloodhound.tokenizers.obj.whitespace("value"),queryTokenizer:Bloodhound.tokenizers.whitespace,remote:"./autocompleter?q=%QUERY"}),searx.searchResults.initialize()),$(document).ready(function(){searx.autocompleter&&($("#q").typeahead(null,{name:"search-results",displayKey:function(a){return a},source:searx.searchResults.ttAdapter()}),$("#q").bind("typeahead:selected",function(a,b){$("#search_form").submit()}))}),$(document).ready(function(){$("#q.autofocus").focus(),$(".select-all-on-click").click(function(){$(this).select()}),$(".btn-collapse").click(function(){var a=$(this).data("btn-text-collapsed"),b=$(this).data("btn-text-not-collapsed");""!==a&&""!==b&&($(this).hasClass("collapsed")?new_html=$(this).html().replace(a,b):new_html=$(this).html().replace(b,a),$(this).html(new_html))}),$(".btn-toggle .btn").click(function(){var a="btn-"+$(this).data("btn-class"),b=$(this).data("btn-label-default"),c=$(this).data("btn-label-toggled");""!==c&&($(this).hasClass("btn-default")?new_html=$(this).html().replace(b,c):new_html=$(this).html().replace(c,b),$(this).html(new_html)),$(this).toggleClass(a),$(this).toggleClass("btn-default")}),$(".media-loader").click(function(){var a=$(this).data("target"),b=$(a+" > iframe"),c=b.attr("src");void 0!==c&&c!==!1||b.attr("src",b.data("src"))}),$(".btn-sm").dblclick(function(){var a="btn-"+$(this).data("btn-class");$(this).hasClass("btn-default")?($(".btn-sm > input").attr("checked","checked"),$(".btn-sm > input").prop("checked",!0),$(".btn-sm").addClass(a),$(".btn-sm").addClass("active"),$(".btn-sm").removeClass("btn-default")):($(".btn-sm > input").attr("checked",""),$(".btn-sm > 
input").removeAttr("checked"),$(".btn-sm > input").checked=!1,$(".btn-sm").removeClass(a),$(".btn-sm").removeClass("active"),$(".btn-sm").addClass("btn-default"))})}),$(document).ready(function(){$(".searx_overpass_request").on("click",function(a){var b="https://overpass-api.de/api/interpreter?data=",c=b+"[out:json][timeout:25];(",d=");out meta;",e=$(this).data("osm-id"),f=$(this).data("osm-type"),g=$(this).data("result-table"),h="#"+$(this).data("result-table-loadicon"),i=["addr:city","addr:country","addr:housenumber","addr:postcode","addr:street"];if(e&&f&&g){g="#"+g;var j=null;switch(f){case"node":j=c+"node("+e+");"+d;break;case"way":j=c+"way("+e+");"+d;break;case"relation":j=c+"relation("+e+");"+d}if(j){$.ajax(j).done(function(a){if(a&&a.elements&&a.elements[0]){var b=a.elements[0],c=$(g).html();for(var d in b.tags)if(null===b.tags.name||i.indexOf(d)==-1){switch(c+="<tr><td>"+d+"</td><td>",d){case"phone":case"fax":c+='<a href="tel:'+b.tags[d].replace(/ /g,"")+'">'+b.tags[d]+"</a>";break;case"email":c+='<a href="mailto:'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"website":case"url":c+='<a href="'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikidata":c+='<a href="https://www.wikidata.org/wiki/'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikipedia":if(b.tags[d].indexOf(":")!=-1){c+='<a href="https://'+b.tags[d].substring(0,b.tags[d].indexOf(":"))+".wikipedia.org/wiki/"+b.tags[d].substring(b.tags[d].indexOf(":")+1)+'">'+b.tags[d]+"</a>";break}default:c+=b.tags[d]}c+="</td></tr>"}$(g).html(c),$(g).removeClass("hidden"),$(h).addClass("hidden")}}).fail(function(){$(h).html($(h).html()+'<p class="text-muted">could not load data!</p>')})}}$(this).off(a)}),$(".searx_init_map").on("click",function(a){var b=$(this).data("leaflet-target"),c=$(this).data("map-lon"),d=$(this).data("map-lat"),e=$(this).data("map-zoom"),f=$(this).data("map-boundingbox"),g=$(this).data("map-geojson");require(["leaflet-0.7.3.min"],function(a){f&&(southWest=L.latLng(f[0],f[2]),northEast=L.latLng(f[1],f[3]),map_bounds=L.latLngBounds(southWest,northEast)),L.Icon.Default.imagePath="./static/themes/oscar/img/map";var h=L.map(b),i="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",j='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors',k=new L.TileLayer(i,{minZoom:1,maxZoom:19,attribution:j}),l="https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png",m='Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';new L.TileLayer(l,{minZoom:1,maxZoom:19,attribution:m});map_bounds?setTimeout(function(){h.fitBounds(map_bounds,{maxZoom:17})},0):c&&d&&(e?h.setView(new L.LatLng(d,c),e):h.setView(new L.LatLng(d,c),8)),h.addLayer(k);var n={"OSM Mapnik":k};L.control.layers(n).addTo(h),g&&L.geoJson(g).addTo(h)}),$(this).off(a)})});

View File

@ -33,5 +33,8 @@ $(document).ready(function(){
}, },
source: searx.searchResults.ttAdapter() source: searx.searchResults.ttAdapter()
}); });
$('#q').bind('typeahead:selected', function(ev, suggestion) {
$("#search_form").submit();
});
} }
}); });

1
searx/static/themes/simple/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/node_modules

View File

@ -36,7 +36,7 @@ module.exports = function(grunt) {
}, },
uglify: { uglify: {
options: { options: {
banner: '/*! simple/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n', banner: '/*! simple/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n',
output: { output: {
comments: 'some' comments: 'some'
}, },
@ -57,7 +57,7 @@ module.exports = function(grunt) {
development: { development: {
options: { options: {
paths: ["less"], paths: ["less"],
banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n' banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
}, },
files: { files: {
"css/searx.css": "less/style.less", "css/searx.css": "less/style.less",
@ -73,7 +73,7 @@ module.exports = function(grunt) {
compatibility: '*' compatibility: '*'
}) })
], ],
banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n' banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
}, },
files: { files: {
"css/searx.min.css": "less/style.less", "css/searx.min.css": "less/style.less",

View File

@ -1,63 +1,97 @@
<div{% if rtl %} dir="ltr"{% endif %}> <div{% if rtl %} dir="ltr"{% endif %}>
<h1>About <a href="{{ url_for('index') }}">searx</a></h1> <h1>About <a href="{{ url_for('index') }}">searx</a></h1>
<p>Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>, aggregating the results of other <a href="{{ url_for('preferences') }}">search engines</a> while not storing information about its users. <p>
</p> Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
<h2>Why use searx?</h2> aggregating the results of other <a href="{{ url_for('preferences') }}">search engines</a>
<ul> while not storing information about its users.
<li>searx may not offer you as personalised results as Google, but it doesn't generate a profile about you</li>
<li>searx doesn't care about what you search for, never shares anything with a third party, and it can't be used to compromise you</li>
<li>searx is free software, the code is 100% open and you can help to make it better. See more on <a href="https://github.com/asciimoo/searx">github</a></li>
</ul>
<p>If you do care about privacy, want to be a conscious user, or otherwise believe
in digital freedom, make searx your default search engine or run it on your own server</p>
<h2>Technical details - How does it work?</h2>
<p>Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
inspired by the <a href="https://beniz.github.io/seeks/">seeks project</a>.<br />
It provides basic privacy by mixing your queries with searches on other platforms without storing search data. Queries are made using a POST request on every browser (except chrome*). Therefore they show up in neither our logs, nor your url history. In case of Chrome* users there is an exception, searx uses the search bar to perform GET requests.<br />
Searx can be added to your browser's search bar; moreover, it can be set as the default search engine.
</p> </p>
<h2>How can I make it my own?</h2> <p>More about searx ...</p>
<p>Searx appreciates your concern regarding logs, so take the <a href="https://github.com/asciimoo/searx">code</a> and run it yourself! <br />Add your Searx to this <a href="https://github.com/asciimoo/searx/wiki/Searx-instances">list</a> to help other people reclaim their privacy and make the Internet freer!
<br />The more decentralized the Internet is, the more freedom we have!</p>
<h2>More about searx</h2>
<ul> <ul>
<li><a href="https://github.com/asciimoo/searx">github</a></li> <li><a href="https://github.com/asciimoo/searx">github</a></li>
<li><a href="https://www.ohloh.net/p/searx/">ohloh</a></li>
<li><a href="https://twitter.com/Searx_engine">twitter</a></li> <li><a href="https://twitter.com/Searx_engine">twitter</a></li>
<li>IRC: #searx @ freenode (<a href="https://kiwiirc.com/client/irc.freenode.com/searx">webclient</a>)</li> <li>IRC: #searx @ freenode (<a href="https://kiwiirc.com/client/irc.freenode.com/searx">webclient</a>)</li>
<li><a href="https://www.transifex.com/projects/p/searx/">transifex</a></li> <li><a href="https://www.transifex.com/projects/p/searx/">transifex</a></li>
</ul> </ul>
<hr /> <hr />
<h2 id="faq">FAQ</h2> <h2>Why use searx?</h2>
<h3>How to add to firefox?</h3>
<p><a href="#" onclick="window.external.AddSearchProvider(window.location.protocol + '//' + window.location.host + '{{ url_for('opensearch') }}');">Install</a> searx as a search engine on any version of Firefox! (javascript required)</p>
<h2 id="dev_faq">Developer FAQ</h2>
<h3>New engines?</h3>
<ul> <ul>
<li>Edit your <a href="https://raw.github.com/asciimoo/searx/master/searx/settings.yml">settings.yml</a></li> <li>
<li>Create your custom engine module, check the <a href="https://github.com/asciimoo/searx/blob/master/examples/basic_engine.py">example engine</a></li> Searx may not offer you as personalised results as Google, but it doesn't
generate a profile about you.
</li>
<li>
Searx doesn't care about what you search for, never shares anything with a
third party, and it can't be used to compromise you.
</li>
<li>
Searx is free software, the code is 100% open and you can help to make it
better. See more on <a href="https://github.com/asciimoo/searx">github</a>.
</li>
</ul> </ul>
<p>Don't forget to restart searx after config edit!</p>
<h3>Installation/WSGI support?</h3> <p>
<p>See the <a href="https://github.com/asciimoo/searx/wiki/Installation">installation and setup</a> wiki page</p> If you do care about privacy, want to be a conscious user, or otherwise
believe in digital freedom, make searx your default search engine or run it
on your own server
</p>
<h3>How to debug engines?</h3> <h2>Technical details - How does it work?</h2>
<p><a href="{{ url_for('stats') }}">Stats page</a> contains some useful data about the engines used.</p>
<p>
Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
inspired by the <a href="https://beniz.github.io/seeks/">seeks project</a>.
It provides basic privacy by mixing your queries with searches on other
platforms without storing search data. Queries are made using a POST request
on every browser (except chrome*). Therefore they show up in neither our
logs, nor your url history. In case of Chrome* users there is an exception,
searx uses the search bar to perform GET requests.
Searx can be added to your browser's search bar; moreover, it can be set as
the default search engine.
</p>
<h2 id='add to browser'>How to set as the default search engine?</h2>
<dt>Firefox</dt>
<dd>
<a href="#" onclick="window.external.AddSearchProvider(window.location.protocol + '//' + window.location.host + '{{ url_for('opensearch') }}');">Install</a>
searx as a search engine on any version of Firefox! (javascript required)
</dd>
<h2>Where to find anonymous usage statistics of this instance ?</h2>
<p>
<a href="{{ url_for('stats') }}">Stats page</a> contains some useful data about the engines used.
</p>
<h2>How can I make it my own?</h2>
<p>
Searx appreciates your concern regarding logs, so take the
code from the <a href="https://github.com/asciimoo/searx">original searx project</a> and
run it yourself!
</p>
<p>
Add your searx instance to this <a href="{{ brand.PUBLIC_INSTANCES }}"> list
of public searx instances</a> to help other people reclaim their privacy and
make the Internet freer! The more decentralized the Internet is, the more
freedom we have!
</p>
<h2>Where are the docs & code of this instance?</h2>
<p>
See the <a href="{{ brand.DOCS_URL }}">{{ brand.DOCS_URL }}</a>
and <a href="{{ brand.GIT_URL }}">{{ brand.GIT_URL }}</a>
</p>
</div> </div>
{% include "__common__/aboutextend.html" ignore missing %} {% include "__common__/aboutextend.html" ignore missing %}

View File

@ -25,5 +25,29 @@
{% if r.pubdate %}<pubDate>{{ r.pubdate }}</pubDate>{% endif %} {% if r.pubdate %}<pubDate>{{ r.pubdate }}</pubDate>{% endif %}
</item> </item>
{% endfor %} {% endfor %}
{% if answers %}
{% for a in answers %}
<item>
<title>{{ a }}</title>
<type>answer</type>
</item>
{% endfor %}
{% endif %}
{% if corrections %}
{% for a in corrections %}
<item>
<title>{{ a }}</title>
<type>correction</type>
</item>
{% endfor %}
{% endif %}
{% if suggestions %}
{% for a in suggestions %}
<item>
<title>{{ a }}</title>
<type>suggestion</type>
</item>
{% endfor %}
{% endif %}
</channel> </channel>
</rss> </rss>

View File

@ -85,10 +85,10 @@
{% endblock %} {% endblock %}
<p class="text-muted"> <p class="text-muted">
<small> <small>
{{ _('Powered by') }} <a href="https://asciimoo.github.io/searx/">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/> {{ _('Powered by') }} <a href="{{ brand.DOCS_URL }}">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
<a href="https://github.com/asciimoo/searx">{{ _('Source code') }}</a> | <a href="{{ brand.GIT_URL }}">{{ _('Source code') }}</a> |
<a href="https://github.com/asciimoo/searx/issues">{{ _('Issue tracker') }}</a> | <a href="{{ brand.ISSUE_URL }}">{{ _('Issue tracker') }}</a> |
<a href="https://github.com/asciimoo/searx/wiki/Searx-instances">{{ _('Public instances') }}</a> <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a>
</small> </small>
</p> </p>
</div> </div>

View File

@ -6,7 +6,7 @@
<div class="panel-body"> <div class="panel-body">
{% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %} {% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %}
{% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content }}</p></bdi>{% endif %} {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content | safe }}</p></bdi>{% endif %}
{% if infobox.attributes -%} {% if infobox.attributes -%}
<table class="table table-striped infobox_part"> <table class="table table-striped infobox_part">

View File

@ -131,6 +131,12 @@
{% endfor %} {% endfor %}
</select> </select>
{{ preferences_item_footer(info, label, rtl) }} {{ preferences_item_footer(info, label, rtl) }}
{% set label = _('Engine tokens') %}
{% set info = _('Access tokens for private engines') %}
{{ preferences_item_header(info, label, rtl) }}
<input class="form-control" id='tokens' name='tokens' value='{{ preferences.tokens.get_value() }}'/>
{{ preferences_item_footer(info, label, rtl) }}
</div> </div>
</fieldset> </fieldset>
</div> </div>

View File

@ -15,7 +15,68 @@
{% include 'oscar/search.html' %} {% include 'oscar/search.html' %}
<div class="row"> <div class="row">
<div class="col-sm-8" id="main_results"> <div class="col-sm-4 col-sm-push-8" id="sidebar_results">
{% if number_of_results != '0' -%}
<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
{%- endif %}
{% if unresponsive_engines and results|length >= 1 -%}
<div class="alert alert-danger fade in" role="alert">
<p>{{ _('Engines cannot retrieve results') }}:</p>
{%- for engine_name, error_type in unresponsive_engines -%}
{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
{%- endfor -%}
</div>
{%- endif %}
{% if infoboxes -%}
{% for infobox in infoboxes %}
{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
{% endfor %}
{%- endif %}
{% if suggestions %}
<div class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">{{ _('Suggestions') }}</h4>
</div>
<div class="panel-body">
{% for suggestion in suggestions %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
<input type="hidden" name="q" value="{{ suggestion.url }}">
<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
</form>
{% endfor %}
</div>
</div>
{%- endif %}
<div class="panel panel-default">
<div class="panel-heading">{{- "" -}}
<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
</div>
<div class="panel-body">
<form role="form">{{- "" -}}
<div class="form-group">{{- "" -}}
<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
</div>{{- "" -}}
</form>
<label>{{ _('Download results') }}</label>
<div class="clearfix"></div>
{% for output_type in ('csv', 'json', 'rss') %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
{{- search_form_attrs(pageno) -}}
<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
</form>
{% endfor %}
<div class="clearfix"></div>
</div>
</div>
</div><!-- /#sidebar_results -->
<div class="col-sm-8 col-sm-pull-4" id="main_results">
<h1 class="sr-only">{{ _('Search results') }}</h1> <h1 class="sr-only">{{ _('Search results') }}</h1>
{% if corrections -%} {% if corrections -%}
@ -91,66 +152,5 @@
{% endif %} {% endif %}
{% endif %} {% endif %}
</div><!-- /#main_results --> </div><!-- /#main_results -->
<div class="col-sm-4" id="sidebar_results">
{% if number_of_results != '0' -%}
<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
{%- endif %}
{% if unresponsive_engines and results|length >= 1 -%}
<div class="alert alert-danger fade in" role="alert">
<p>{{ _('Engines cannot retrieve results') }}:</p>
{%- for engine_name, error_type in unresponsive_engines -%}
{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
{%- endfor -%}
</div>
{%- endif %}
{% if infoboxes -%}
{% for infobox in infoboxes %}
{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
{% endfor %}
{%- endif %}
{% if suggestions %}
<div class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">{{ _('Suggestions') }}</h4>
</div>
<div class="panel-body">
{% for suggestion in suggestions %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
<input type="hidden" name="q" value="{{ suggestion.url }}">
<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
</form>
{% endfor %}
</div>
</div>
{%- endif %}
<div class="panel panel-default">
<div class="panel-heading">{{- "" -}}
<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
</div>
<div class="panel-body">
<form role="form">{{- "" -}}
<div class="form-group">{{- "" -}}
<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
</div>{{- "" -}}
</form>
<label>{{ _('Download results') }}</label>
<div class="clearfix"></div>
{% for output_type in ('csv', 'json', 'rss') %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
{{- search_form_attrs(pageno) -}}
<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
</form>
{% endfor %}
<div class="clearfix"></div>
</div>
</div>
</div><!-- /#sidebar_results -->
</div> </div>
{% endblock %} {% endblock %}

View File

@ -6,6 +6,7 @@
<input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s"> <input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
<span class="input-group-btn"> <span class="input-group-btn">
<button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button> <button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
<button type="reset" class="btn btn-default" aria-label="{{ _('Clear search') }}"><span class="hide_if_nojs">{{ icon('remove') }}</span><span class="hidden active_if_nojs">{{ _('Clear') }}</span></button>
</span> </span>
</div> </div>
</div> </div>

View File

@ -9,6 +9,7 @@
<input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s"> <input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
<span class="input-group-btn"> <span class="input-group-btn">
<button type="submit" class="btn btn-default input-lg" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button> <button type="submit" class="btn btn-default input-lg" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
<button type="reset" class="btn btn-default input-lg" aria-label="{{ _('Clear search') }}"><span class="hide_if_nojs">{{ icon('remove') }}</span><span class="hidden active_if_nojs">{{ _('Clear') }}</span></button>
</span> </span>
</div> </div>
<div class="col-md-8 col-md-offset-2 advanced"> <div class="col-md-8 col-md-offset-2 advanced">

Some files were not shown because too many files have changed in this diff.