summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/integration.yml2
-rw-r--r--docs/conf.py8
-rw-r--r--docs/dev/contribution_guide.rst6
-rw-r--r--docs/dev/plugins.rst58
-rw-r--r--docs/dev/search_api.rst8
-rw-r--r--docs/src/searx.search.rst38
-rw-r--r--requirements-dev.txt1
-rw-r--r--searx/network/__init__.py15
-rw-r--r--searx/plugins/__init__.py2
-rw-r--r--searx/plugins/ahmia_filter.py8
-rw-r--r--searx/plugins/hostname_replace.py32
-rw-r--r--searx/plugins/oa_doi_rewrite.py15
-rw-r--r--searx/results.py91
-rw-r--r--searx/search/__init__.py32
-rw-r--r--searx/search/models.py1
-rw-r--r--searx/settings.yml12
-rw-r--r--searx/templates/__common__/about.html2
-rw-r--r--searx/templates/oscar/base.html1
-rw-r--r--searx/templates/oscar/stats.html2
-rw-r--r--searx/templates/simple/base.html1
-rw-r--r--searx/templates/simple/stats.html2
-rwxr-xr-xsearx/webapp.py4
22 files changed, 241 insertions, 100 deletions
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 84ce51a04..85b225a5a 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04]
- python-version: [3.6, 3.7, 3.8, 3.9]
+ python-version: ["3.6", "3.7", "3.8", "3.9", "3.10.0-rc.2"]
steps:
- name: Checkout
uses: actions/checkout@v2
diff --git a/docs/conf.py b/docs/conf.py
index 978d6a660..160063ceb 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -9,9 +9,9 @@ from searx.version import VERSION_STRING, GIT_URL, GIT_BRANCH
# Project --------------------------------------------------------------
-project = u'searx'
-copyright = u'2015-2020, Adam Tauber, Noémi Ványi'
-author = u'Adam Tauber'
+project = 'SearXNG'
+copyright = '2021 SearXNG team, 2015-2021 Adam Tauber, Noémi Ványi'
+author = '2021 SearXNG team, 2015-2021 Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
SEARX_URL = get_setting('server.base_url') or 'https://example.org/searx'
@@ -132,7 +132,7 @@ html_sidebars = {
singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
html_static_path = ["static"]
html_logo = "static/img/searx_logo_small.png"
-html_title = "Searx Documentation ({})".format("Searx-{}.tex".format(VERSION_STRING))
+html_title = "Searx Documentation ({})".format(VERSION_STRING)
html_show_sourcelink = False
# LaTeX ----------------------------------------------------------------
diff --git a/docs/dev/contribution_guide.rst b/docs/dev/contribution_guide.rst
index ed1c223c5..a8e5095be 100644
--- a/docs/dev/contribution_guide.rst
+++ b/docs/dev/contribution_guide.rst
@@ -105,11 +105,7 @@ For more help on getting started with searx development, see :ref:`devquickstart
Translation
===========
-Translation currently takes place on :ref:`transifex <translation>`.
-
-.. caution::
-
- Please, do not update translation files in the repo.
+Translation currently takes place on :ref:`weblate <translation>`.
.. _contrib docs:
diff --git a/docs/dev/plugins.rst b/docs/dev/plugins.rst
index 16262ea6d..44401e34f 100644
--- a/docs/dev/plugins.rst
+++ b/docs/dev/plugins.rst
@@ -26,8 +26,8 @@ Example plugin
# attach callback to the post search hook
# request: flask request object
# ctx: the whole local context of the post search hook
- def post_search(request, ctx):
- ctx['search'].suggestions.add('example')
+ def post_search(request, search):
+ search.result_container.suggestions.add('example')
return True
External plugins
@@ -50,20 +50,52 @@ Plugin entry points
Entry points (hooks) define when a plugin runs. Right now only three hooks are
implemented. So feel free to implement a hook if it fits the behaviour of your
-plugin.
+plugin. A plugin doesn't need to implement all the hooks.
-Pre search hook
----------------
-Runs BEFORE the search request. Function to implement: ``pre_search``
+.. py:function:: pre_search(request, search) -> bool
-Post search hook
-----------------
+ Runs BEFORE the search request.
-Runs AFTER the search request. Function to implement: ``post_search``
+ `search.result_container` can be changed.
-Result hook
------------
+ Return a boolean:
-Runs when a new result is added to the result list. Function to implement:
-``on_result``
+ * True to continue the search
+ * False to stop the search
+
+ :param flask.request request:
+ :param searx.search.SearchWithPlugins search:
+ :return: False to stop the search
+ :rtype: bool
+
+
+.. py:function:: post_search(request, search) -> None
+
+ Runs AFTER the search request.
+
+ :param flask.request request: Flask request.
+ :param searx.search.SearchWithPlugins search: Context.
+
+
+.. py:function:: on_result(request, search, result) -> bool
+
+ Runs for each result of each engine.
+
+ `result` can be changed.
+
+ If `result["url"]` is defined, then `result["parsed_url"] = urlparse(result['url'])`
+
+ .. warning::
+ `result["url"]` can be changed, but `result["parsed_url"]` must be updated too.
+
+ Return a boolean:
+
+ * True to keep the result
+ * False to remove the result
+
+ :param flask.request request:
+ :param searx.search.SearchWithPlugins search:
+ :param typing.Dict result: Result, see - :ref:`engine results`
+ :return: True to keep the result
+ :rtype: bool
diff --git a/docs/dev/search_api.rst b/docs/dev/search_api.rst
index 5fcdc4560..7a5f3cf98 100644
--- a/docs/dev/search_api.rst
+++ b/docs/dev/search_api.rst
@@ -100,17 +100,17 @@ Parameters
:default: ``HTTPS_rewrite``, ``Self_Informations``,
``Search_on_category_select``, ``Tracker_URL_remover``
- :values: [ ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
+ :values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
- ``Search_on_category_select`` ]
+ ``Search_on_category_select``, ``Hostname_replace``
``disabled_plugins``: optional
List of disabled plugins.
- :default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``
+ :default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``, ``Hostname_replace``
:values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
- ``Search_on_category_select``
+ ``Search_on_category_select``, ``Hostname_replace``
``enabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
List of enabled engines.
diff --git a/docs/src/searx.search.rst b/docs/src/searx.search.rst
new file mode 100644
index 000000000..ad76d4183
--- /dev/null
+++ b/docs/src/searx.search.rst
@@ -0,0 +1,38 @@
+.. _searx.search:
+
+======
+Search
+======
+
+.. autoclass:: searx.search.EngineRef
+ :members:
+
+.. autoclass:: searx.search.SearchQuery
+ :members:
+
+.. autoclass:: searx.search.Search
+
+ .. attribute:: search_query
+ :type: searx.search.SearchQuery
+
+ .. attribute:: result_container
+ :type: searx.results.ResultContainer
+
+ .. automethod:: search() -> searx.results.ResultContainer
+
+.. autoclass:: searx.search.SearchWithPlugins
+ :members:
+
+ .. attribute:: search_query
+ :type: searx.search.SearchQuery
+
+ .. attribute:: result_container
+ :type: searx.results.ResultContainer
+
+ .. attribute:: ordered_plugin_list
+ :type: typing.List
+
+ .. attribute:: request
+ :type: flask.request
+
+ .. automethod:: search() -> searx.results.ResultContainer
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 42bd11726..ae60b6c8f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,7 +4,6 @@ cov-core==1.15.0
pycodestyle==2.7.0
pylint==2.10.2
splinter==0.15.0
-transifex-client==0.14.3
selenium==3.141.0
twine==3.4.2
Pallets-Sphinx-Themes==2.0.1
diff --git a/searx/network/__init__.py b/searx/network/__init__.py
index 7b0396a12..21c4c27b5 100644
--- a/searx/network/__init__.py
+++ b/searx/network/__init__.py
@@ -173,9 +173,17 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
if len(chunk) > 0:
queue.put(chunk)
except httpx.ResponseClosed:
- # the response was closed
+ # the response was queued before the exception.
+ # the exception was raised on aiter_raw.
+ # we do nothing here: in the finally block, None will be queued
+ # so stream(method, url, **kwargs) generator can stop
pass
- except (httpx.HTTPError, OSError, h2.exceptions.ProtocolError) as e:
+ except Exception as e: # pylint: disable=broad-except
+ # broad except to avoid this scenario:
+ # exception in network.stream(method, url, **kwargs)
+    #   -> the exception is not caught here
+ # -> queue None (in finally)
+    #   -> the function below stream(method, url, **kwargs) has nothing to return
queue.put(e)
finally:
queue.put(None)
@@ -201,8 +209,9 @@ def stream(method, url, **kwargs):
the httpx.AsyncHTTPTransport declared above.
"""
queue = SimpleQueue()
+ network = get_context_network()
future = asyncio.run_coroutine_threadsafe(
- stream_chunk_to_queue(get_network(), queue, method, url, **kwargs),
+ stream_chunk_to_queue(network, queue, method, url, **kwargs),
get_loop()
)
diff --git a/searx/plugins/__init__.py b/searx/plugins/__init__.py
index 3a35f7025..1153c9ed1 100644
--- a/searx/plugins/__init__.py
+++ b/searx/plugins/__init__.py
@@ -31,6 +31,7 @@ from searx.plugins import (oa_doi_rewrite,
hash_plugin,
infinite_scroll,
self_info,
+ hostname_replace,
search_on_category_select,
tracker_url_remover,
vim_hotkeys)
@@ -182,6 +183,7 @@ plugins.register(oa_doi_rewrite)
plugins.register(hash_plugin)
plugins.register(infinite_scroll)
plugins.register(self_info)
+plugins.register(hostname_replace)
plugins.register(search_on_category_select)
plugins.register(tracker_url_remover)
plugins.register(vim_hotkeys)
diff --git a/searx/plugins/ahmia_filter.py b/searx/plugins/ahmia_filter.py
index 83b05e4d2..70f216ee1 100644
--- a/searx/plugins/ahmia_filter.py
+++ b/searx/plugins/ahmia_filter.py
@@ -20,14 +20,8 @@ def get_ahmia_blacklist():
return ahmia_blacklist
-def not_blacklisted(result):
+def on_result(request, search, result):
if not result.get('is_onion') or not result.get('parsed_url'):
return True
result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
return result_hash not in get_ahmia_blacklist()
-
-
-def post_search(request, search):
- filtered_results = list(filter(not_blacklisted, search.result_container._merged_results))
- search.result_container._merged_results = filtered_results
- return True
diff --git a/searx/plugins/hostname_replace.py b/searx/plugins/hostname_replace.py
new file mode 100644
index 000000000..778b84615
--- /dev/null
+++ b/searx/plugins/hostname_replace.py
@@ -0,0 +1,32 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import re
+from urllib.parse import urlunparse
+from searx import settings
+from searx.plugins import logger
+from flask_babel import gettext
+
+name = gettext('Hostname replace')
+description = gettext('Rewrite result hostnames or remove results based on the hostname')
+default_on = False
+preference_section = 'general'
+
+plugin_id = 'hostname_replace'
+
+replacements = {re.compile(p): r for (p, r) in settings[plugin_id].items()} if plugin_id in settings else {}
+
+logger = logger.getChild(plugin_id)
+parsed = 'parsed_url'
+
+
+def on_result(request, search, result):
+ if parsed not in result:
+ return True
+ for (pattern, replacement) in replacements.items():
+ if pattern.search(result[parsed].netloc):
+ if not replacement:
+ return False
+ result[parsed] = result[parsed]._replace(netloc=pattern.sub(replacement, result[parsed].netloc))
+ result['url'] = urlunparse(result[parsed])
+
+ return True
diff --git a/searx/plugins/oa_doi_rewrite.py b/searx/plugins/oa_doi_rewrite.py
index 02a712942..2dcc01e05 100644
--- a/searx/plugins/oa_doi_rewrite.py
+++ b/searx/plugins/oa_doi_rewrite.py
@@ -11,8 +11,6 @@ description = gettext('Avoid paywalls by redirecting to open-access versions of
default_on = False
preference_section = 'general'
-doi_resolvers = settings['doi_resolvers']
-
def extract_doi(url):
match = regex.search(url.path)
@@ -25,13 +23,12 @@ def extract_doi(url):
return None
-def get_doi_resolver(args, preference_doi_resolver):
+def get_doi_resolver(preferences):
doi_resolvers = settings['doi_resolvers']
- doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
- if doi_resolver not in doi_resolvers:
- doi_resolver = settings['default_doi_resolver']
- doi_resolver_url = doi_resolvers[doi_resolver]
- return doi_resolver_url
+ selected_resolver = preferences.get_value('doi_resolver')[0]
+ if selected_resolver not in doi_resolvers:
+ selected_resolver = settings['default_doi_resolver']
+ return doi_resolvers[selected_resolver]
def on_result(request, search, result):
@@ -43,6 +40,6 @@ def on_result(request, search, result):
for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
if doi.endswith(suffix):
doi = doi[:-len(suffix)]
- result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
+ result['url'] = get_doi_resolver(request.preferences) + doi
result['parsed_url'] = urlparse(result['url'])
return True
diff --git a/searx/results.py b/searx/results.py
index d0cb4df3f..2e81f5dc4 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -145,7 +145,7 @@ class ResultContainer:
"""docstring for ResultContainer"""
__slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
- '_ordered', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data'
+ '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
def __init__(self):
super().__init__()
@@ -156,43 +156,48 @@ class ResultContainer:
self.corrections = set()
self._number_of_results = []
self.engine_data = defaultdict(dict)
- self._ordered = False
+ self._closed = False
self.paging = False
self.unresponsive_engines = set()
self.timings = []
self.redirect_url = None
+ self.on_result = lambda _: True
def extend(self, engine_name, results):
+ if self._closed:
+ return
+
standard_result_count = 0
error_msgs = set()
for result in list(results):
result['engine'] = engine_name
- if 'suggestion' in result:
+ if 'suggestion' in result and self.on_result(result):
self.suggestions.add(result['suggestion'])
- elif 'answer' in result:
+ elif 'answer' in result and self.on_result(result):
self.answers[result['answer']] = result
- elif 'correction' in result:
+ elif 'correction' in result and self.on_result(result):
self.corrections.add(result['correction'])
- elif 'infobox' in result:
+ elif 'infobox' in result and self.on_result(result):
self._merge_infobox(result)
- elif 'number_of_results' in result:
+ elif 'number_of_results' in result and self.on_result(result):
self._number_of_results.append(result['number_of_results'])
- elif 'engine_data' in result:
+ elif 'engine_data' in result and self.on_result(result):
self.engine_data[engine_name][result['key']] = result['engine_data']
- else:
+ elif 'url' in result:
# standard result (url, title, content)
- if 'url' in result and not isinstance(result['url'], str):
- logger.debug('result: invalid URL: %s', str(result))
- error_msgs.add('invalid URL')
- elif 'title' in result and not isinstance(result['title'], str):
- logger.debug('result: invalid title: %s', str(result))
- error_msgs.add('invalid title')
- elif 'content' in result and not isinstance(result['content'], str):
- logger.debug('result: invalid content: %s', str(result))
- error_msgs.add('invalid content')
- else:
- self._merge_result(result, standard_result_count + 1)
- standard_result_count += 1
+ if not self._is_valid_url_result(result, error_msgs):
+ continue
+ # normalize the result
+ self._normalize_url_result(result)
+                # calling on_result invokes searx.search.SearchWithPlugins._on_result,
+                # which in turn calls the plugins
+ if not self.on_result(result):
+ continue
+ self.__merge_url_result(result, standard_result_count + 1)
+ standard_result_count += 1
+ elif self.on_result(result):
+ self.__merge_result_no_url(result, standard_result_count + 1)
+ standard_result_count += 1
if len(error_msgs) > 0:
for msg in error_msgs:
@@ -219,14 +224,29 @@ class ResultContainer:
if add_infobox:
self.infoboxes.append(infobox)
- def _merge_result(self, result, position):
+ def _is_valid_url_result(self, result, error_msgs):
if 'url' in result:
- self.__merge_url_result(result, position)
- return
-
- self.__merge_result_no_url(result, position)
-
- def __merge_url_result(self, result, position):
+ if not isinstance(result['url'], str):
+ logger.debug('result: invalid URL: %s', str(result))
+ error_msgs.add('invalid URL')
+ return False
+
+ if 'title' in result and not isinstance(result['title'], str):
+ logger.debug('result: invalid title: %s', str(result))
+ error_msgs.add('invalid title')
+ return False
+
+ if 'content' in result:
+ if not isinstance(result['content'], str):
+ logger.debug('result: invalid content: %s', str(result))
+ error_msgs.add('invalid content')
+ return False
+
+ return True
+
+ def _normalize_url_result(self, result):
+ """Return True if the result is valid
+ """
result['parsed_url'] = urlparse(result['url'])
# if the result has no scheme, use http as default
@@ -234,12 +254,14 @@ class ResultContainer:
result['parsed_url'] = result['parsed_url']._replace(scheme="http")
result['url'] = result['parsed_url'].geturl()
- result['engines'] = set([result['engine']])
-
# strip multiple spaces and carriage returns from content
if result.get('content'):
result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+ return True
+
+ def __merge_url_result(self, result, position):
+ result['engines'] = set([result['engine']])
duplicated = self.__find_duplicated_http_result(result)
if duplicated:
self.__merge_duplicated_http_result(duplicated, result, position)
@@ -295,7 +317,9 @@ class ResultContainer:
with RLock():
self._merged_results.append(result)
- def order_results(self):
+ def close(self):
+ self._closed = True
+
for result in self._merged_results:
score = result_score(result)
result['score'] = score
@@ -349,12 +373,11 @@ class ResultContainer:
categoryPositions[category] = {'index': len(gresults), 'count': 8}
# update _merged_results
- self._ordered = True
self._merged_results = gresults
def get_ordered_results(self):
- if not self._ordered:
- self.order_results()
+ if not self._closed:
+ self.close()
return self._merged_results
def results_length(self):
diff --git a/searx/search/__init__.py b/searx/search/__init__.py
index d8d3e1e1c..69d7ffb25 100644
--- a/searx/search/__init__.py
+++ b/searx/search/__init__.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
-# pylint: disable=missing-module-docstring
+# pylint: disable=missing-module-docstring, too-few-public-methods
import typing
import threading
@@ -39,7 +39,7 @@ class Search:
__slots__ = "search_query", "result_container", "start_time", "actual_timeout"
- def __init__(self, search_query):
+ def __init__(self, search_query: SearchQuery):
# init vars
super().__init__()
self.search_query = search_query
@@ -163,7 +163,7 @@ class Search:
return True
# do search-request
- def search(self):
+ def search(self) -> ResultContainer:
self.start_time = default_timer()
if not self.search_external_bang():
if not self.search_answerers():
@@ -172,24 +172,32 @@ class Search:
class SearchWithPlugins(Search):
- """Similar to the Search class but call the plugins."""
+ """Inherit from the Search class, add calls to the plugins."""
__slots__ = 'ordered_plugin_list', 'request'
- def __init__(self, search_query, ordered_plugin_list, request):
+ def __init__(self, search_query: SearchQuery, ordered_plugin_list, request: "flask.Request"):
super().__init__(search_query)
self.ordered_plugin_list = ordered_plugin_list
- self.request = request
-
- def search(self):
+ self.result_container.on_result = self._on_result
+ # pylint: disable=line-too-long
+ # get the "real" request to use it outside the Flask context.
+ # see
+ # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
+ # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
+ # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
+ # pylint: enable=line-too-long
+ self.request = request._get_current_object()
+
+ def _on_result(self, result):
+ return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+
+ def search(self) -> ResultContainer:
if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
super().search()
plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
- results = self.result_container.get_ordered_results()
-
- for result in results:
- plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+ self.result_container.close()
return self.result_container
diff --git a/searx/search/models.py b/searx/search/models.py
index 7233fac42..e48cb3611 100644
--- a/searx/search/models.py
+++ b/searx/search/models.py
@@ -4,6 +4,7 @@ import typing
class EngineRef:
+ """Reference by names to an engine and category"""
__slots__ = 'name', 'category'
diff --git a/searx/settings.yml b/searx/settings.yml
index e5eb9dd65..8e2aeb5e9 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -150,7 +150,17 @@ outgoing:
#
# enabled_plugins:
# - "HTTPS rewrite"
-# - ...
+# - "Hostname replace" # see configuration below
+
+# "Hostname replace" plugin configuration example:
+# hostname_replace:
+# '(.*\.)?youtube\.com$': 'invidious.example.com'
+# '(.*\.)?youtu\.be$': 'invidious.example.com'
+#    '(.*\.)?youtube-nocookie\.com$': 'yotter.example.com'
+# '(.*\.)?reddit\.com$': 'teddit.example.com'
+# '(.*\.)?redd\.it$': 'teddit.example.com'
+# '(www\.)?twitter\.com$': 'nitter.example.com'
+# 'spam\.example\.com': false # remove results from spam.example.com
checker:
# disable checker when in debug mode
diff --git a/searx/templates/__common__/about.html b/searx/templates/__common__/about.html
index 707802d45..5a9065f03 100644
--- a/searx/templates/__common__/about.html
+++ b/searx/templates/__common__/about.html
@@ -12,7 +12,7 @@
<ul>
<li><a href="{{ searx_git_url }}">SearXNG sources</a></li>
- <li><a href="https://www.transifex.com/projects/p/searx/">transifex</a></li>
+ <li><a href="https://weblate.bubu1.eu/projects/searxng/">weblate</a></li>
</ul>
<hr />
diff --git a/searx/templates/oscar/base.html b/searx/templates/oscar/base.html
index 7cd38a25c..df06763fa 100644
--- a/searx/templates/oscar/base.html
+++ b/searx/templates/oscar/base.html
@@ -85,6 +85,7 @@
{{ _('Powered by') }} <a href="{{ get_setting('brand.docs_url') }}">SearXNG</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
<a href="{{ searx_git_url }}">{{ _('Source code') }}</a> |
<a href="{{ get_setting('brand.issue_url') }}">{{ _('Issue tracker') }}</a> |
+ <a href="{{ url_for('stats') }}">{{ _('Engine stats') }}</a> |
<a href="{{ get_setting('brand.public_instances') }}">{{ _('Public instances') }}</a>{% if get_setting('general.contact_url') %} |
<a href="{{ get_setting('general.contact_url') }}">{{ _('Contact instance maintainer') }}</a>{% endif %}
</small>
diff --git a/searx/templates/oscar/stats.html b/searx/templates/oscar/stats.html
index 4be8043ff..646cb9923 100644
--- a/searx/templates/oscar/stats.html
+++ b/searx/templates/oscar/stats.html
@@ -15,7 +15,7 @@
{% block content %}
<div class="container-fluid">
- <h1>{{ _('Engine stats') }}{% if selected_engine_name %} - {{ selected_engine_name }}{% endif %}</h1>
+ <h1>{% if selected_engine_name %}<a href="{{ url_for('stats') }}">{% endif %}{{ _('Engine stats') }}{% if selected_engine_name %}</a> - {{ selected_engine_name }}{% endif %}</h1>
<div class="row">
<div class="col-xs-12 col-sm-12 col-md-12">
<div class="table-responsive">
diff --git a/searx/templates/simple/base.html b/searx/templates/simple/base.html
index 7020de756..27c4b5a4c 100644
--- a/searx/templates/simple/base.html
+++ b/searx/templates/simple/base.html
@@ -53,6 +53,7 @@
{{ _('Powered by') }} <a href="{{ url_for('about') }}">searxng</a> - {{ searx_version }} — {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
<a href="{{ searx_git_url }}">{{ _('Source code') }}</a> |
<a href="{{ get_setting('brand.issue_url') }}">{{ _('Issue tracker') }}</a> |
+ <a href="{{ url_for('stats') }}">{{ _('Engine stats') }}</a> |
<a href="{{ get_setting('brand.public_instances') }}">{{ _('Public instances') }}</a>{% if get_setting('general.contact_url') %} |
<a href="{{ get_setting('general.contact_url') }}">{{ _('Contact instance maintainer') }}</a>{% endif %}
</p>
diff --git a/searx/templates/simple/stats.html b/searx/templates/simple/stats.html
index f423b6861..889b5d05a 100644
--- a/searx/templates/simple/stats.html
+++ b/searx/templates/simple/stats.html
@@ -18,7 +18,7 @@
<a href="{{ url_for('index') }}"><h1><span>searx</span></h1></a>
-<h2>{{ _('Engine stats') }}{% if selected_engine_name %} - {{ selected_engine_name }}{% endif %}</h2>
+<h2>{% if selected_engine_name %}<a href="{{ url_for('stats') }}">{% endif %}{{ _('Engine stats') }}{% if selected_engine_name %}</a> - {{ selected_engine_name }}{% endif %}</h2>
{% if not engine_stats.get('time') %}
{{ _('There is currently no data available. ') }}
diff --git a/searx/webapp.py b/searx/webapp.py
index 6fcf7c464..cffde08a3 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -1040,9 +1040,7 @@ def preferences():
themes = themes,
plugins = plugins,
doi_resolvers = settings['doi_resolvers'],
- current_doi_resolver = get_doi_resolver(
- request.args, request.preferences.get_value('doi_resolver')
- ),
+ current_doi_resolver = get_doi_resolver(request.preferences),
allowed_plugins = allowed_plugins,
theme = get_current_theme_name(),
preferences_url_params = request.preferences.get_as_url_params(),