summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.codecov.yml31
-rw-r--r--.landscape.yaml3
-rwxr-xr-xmanage17
-rw-r--r--searx/__init__.py1
-rw-r--r--searx/metrics/error_recorder.py4
-rw-r--r--searx/network/__init__.py6
-rw-r--r--searx/network/client.py8
-rw-r--r--searx/network/network.py7
-rw-r--r--searx/search/processors/__init__.py32
-rw-r--r--searx/search/processors/abstract.py41
-rw-r--r--searx/search/processors/offline.py14
-rw-r--r--searx/search/processors/online.py34
-rw-r--r--searx/search/processors/online_currency.py15
-rw-r--r--searx/search/processors/online_dictionary.py7
-rwxr-xr-xsearx/webapp.py76
-rw-r--r--tox.ini2
16 files changed, 162 insertions, 136 deletions
diff --git a/.codecov.yml b/.codecov.yml
deleted file mode 100644
index 6cdbc2885..000000000
--- a/.codecov.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-comment: false
-coverage:
- status:
- project:
- default:
- # basic
- target: auto
- threshold: null
- base: auto
- # advanced
- branches: null
- if_no_uploads: error
- if_not_found: success
- if_ci_failed: error
- only_pulls: false
- flags: null
- paths: null
- patch:
- default:
- # basic
- target: auto
- threshold: null
- base: auto
- # advanced
- branches: null
- if_no_uploads: error
- if_not_found: success
- if_ci_failed: error
- only_pulls: false
- flags: null
- paths: null
diff --git a/.landscape.yaml b/.landscape.yaml
deleted file mode 100644
index 1bb397718..000000000
--- a/.landscape.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-strictness: high
-ignore-paths:
- - bootstrap.py
diff --git a/manage b/manage
index f42075b28..3ff1a6f88 100755
--- a/manage
+++ b/manage
@@ -343,14 +343,17 @@ pyenv.install() {
if pyenv.install.OK > /dev/null; then
return 0
fi
- pyenv
- pyenv.OK || die 42 "error while build pyenv (${PY_ENV_BIN})"
( set -e
- build_msg PYENV "[install] pip install -e 'searx${PY_SETUP_EXTRAS}'"
- "${PY_ENV_BIN}/python" -m pip install -e ".${PY_SETUP_EXTRAS}"
- buildenv
- ) || die 42 "error while pip install (${PY_ENV_BIN})"
+ pyenv
+ build_msg PYENV "[install] pip install -e 'searx${PY_SETUP_EXTRAS}'"
+ "${PY_ENV_BIN}/python" -m pip install -e ".${PY_SETUP_EXTRAS}"
+ buildenv
+ )
+ local exit_val=$?
+ if [ ! $exit_val -eq 0 ]; then
+ die 42 "error while pip install (${PY_ENV_BIN})"
+ fi
}
pyenv.uninstall() {
@@ -462,7 +465,7 @@ themes.simple() {
PYLINT_FILES=()
while IFS= read -r line; do
PYLINT_FILES+=("$line")
-done <<< $(pylint.FILES)
+done <<< "$(pylint.FILES)"
# shellcheck disable=SC2119
main() {
diff --git a/searx/__init__.py b/searx/__init__.py
index 71e00a49a..6aac98713 100644
--- a/searx/__init__.py
+++ b/searx/__init__.py
@@ -22,6 +22,7 @@ from os.path import realpath, dirname, join, abspath, isfile
searx_dir = abspath(dirname(__file__))
+searx_parent_dir = abspath(dirname(dirname(__file__)))
engine_dir = dirname(realpath(__file__))
static_path = abspath(join(dirname(__file__), 'static'))
settings, settings_load_message = searx.settings_loader.load_settings()
diff --git a/searx/metrics/error_recorder.py b/searx/metrics/error_recorder.py
index 2bf25fb0d..c5de008cc 100644
--- a/searx/metrics/error_recorder.py
+++ b/searx/metrics/error_recorder.py
@@ -5,7 +5,7 @@ from urllib.parse import urlparse
from httpx import HTTPError, HTTPStatusError
from searx.exceptions import (SearxXPathSyntaxException, SearxEngineXPathException, SearxEngineAPIException,
SearxEngineAccessDeniedException)
-from searx import logger
+from searx import logger, searx_parent_dir
errors_per_engines = {}
@@ -117,6 +117,8 @@ def get_exception_classname(exc: Exception) -> str:
def get_error_context(framerecords, exception_classname, log_message, log_parameters, secondary) -> ErrorContext:
searx_frame = get_trace(framerecords)
filename = searx_frame.filename
+ if filename.startswith(searx_parent_dir):
+ filename = filename[len(searx_parent_dir) + 1:]
function = searx_frame.function
line_no = searx_frame.lineno
code = searx_frame.code_context[0].strip()
diff --git a/searx/network/__init__.py b/searx/network/__init__.py
index 40665f7d6..981b2261a 100644
--- a/searx/network/__init__.py
+++ b/searx/network/__init__.py
@@ -9,7 +9,7 @@ import httpx
import h2.exceptions
from .network import get_network, initialize
-from .client import LOOP
+from .client import get_loop
from .raise_for_httperror import raise_for_httperror
# queue.SimpleQueue: Support Python 3.6
@@ -98,7 +98,7 @@ def request(method, url, **kwargs):
network = get_context_network()
# do request
- future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), LOOP)
+ future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), get_loop())
try:
response = future.result(timeout)
except concurrent.futures.TimeoutError as e:
@@ -179,7 +179,7 @@ def stream(method, url, **kwargs):
"""
q = SimpleQueue()
future = asyncio.run_coroutine_threadsafe(stream_chunk_to_queue(get_network(), q, method, url, **kwargs),
- LOOP)
+ get_loop())
chunk_or_exception = q.get()
while chunk_or_exception is not None:
if isinstance(chunk_or_exception, Exception):
diff --git a/searx/network/client.py b/searx/network/client.py
index 631e36f8f..b377e010f 100644
--- a/searx/network/client.py
+++ b/searx/network/client.py
@@ -120,7 +120,6 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
- global LOOP, TRANSPORT_KWARGS
# support socks5h (requests compatibility):
# https://requests.readthedocs.io/en/master/user/advanced/#socks
# socks5:// hostname is resolved on client side
@@ -136,7 +135,7 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
return AsyncProxyTransportFixed(proxy_type=proxy_type, proxy_host=proxy_host, proxy_port=proxy_port,
username=proxy_username, password=proxy_password,
rdns=rdns,
- loop=LOOP,
+ loop=get_loop(),
verify=verify,
http2=http2,
local_address=local_address,
@@ -192,6 +191,11 @@ def new_client(enable_http, verify, enable_http2,
return httpx.AsyncClient(transport=transport, mounts=mounts, max_redirects=max_redirects)
+def get_loop():
+ global LOOP
+ return LOOP
+
+
def init():
# log
for logger_name in ('hpack.hpack', 'hpack.table'):
diff --git a/searx/network/network.py b/searx/network/network.py
index 15c23d193..bb822a7d3 100644
--- a/searx/network/network.py
+++ b/searx/network/network.py
@@ -7,7 +7,7 @@ from itertools import cycle
import httpx
-from .client import new_client, LOOP
+from .client import new_client, get_loop
DEFAULT_NAME = '__DEFAULT__'
@@ -291,8 +291,9 @@ def done():
So Network.aclose is called here using atexit.register
"""
try:
- if LOOP:
- future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), LOOP)
+ loop = get_loop()
+ if loop:
+ future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
# wait 3 seconds to close the HTTP clients
future.result(3)
finally:
diff --git a/searx/search/processors/__init__.py b/searx/search/processors/__init__.py
index 4cae3cd0f..caac74e65 100644
--- a/searx/search/processors/__init__.py
+++ b/searx/search/processors/__init__.py
@@ -1,37 +1,49 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+
+"""Implement request processors used by engine-types.
+
+"""
+
+__all__ = [
+ 'EngineProcessor',
+ 'OfflineProcessor',
+ 'OnlineProcessor',
+ 'OnlineDictionaryProcessor',
+ 'OnlineCurrencyProcessor',
+ 'processors',
+]
+
+from searx import logger
+import searx.engines as engines
from .online import OnlineProcessor
from .offline import OfflineProcessor
from .online_dictionary import OnlineDictionaryProcessor
from .online_currency import OnlineCurrencyProcessor
from .abstract import EngineProcessor
-from searx import logger
-import searx.engines as engines
-
-__all__ = ['EngineProcessor', 'OfflineProcessor', 'OnlineProcessor',
- 'OnlineDictionaryProcessor', 'OnlineCurrencyProcessor', 'processors']
logger = logger.getChild('search.processors')
processors = {}
-
+"""Cache request processors, stored by *engine-name* (:py:func:`initialize`)"""
def get_processor_class(engine_type):
+ """Return processor class according to the ``engine_type``"""
for c in [OnlineProcessor, OfflineProcessor, OnlineDictionaryProcessor, OnlineCurrencyProcessor]:
if c.engine_type == engine_type:
return c
return None
-
def get_processor(engine, engine_name):
+    """Return processor instance that fits to ``engine.engine.type``"""
engine_type = getattr(engine, 'engine_type', 'online')
processor_class = get_processor_class(engine_type)
if processor_class:
return processor_class(engine, engine_name)
- else:
- return None
-
+ return None
def initialize(engine_list):
+ """Initialize all engines and store a processor for each engine in :py:obj:`processors`."""
engines.initialize_engines(engine_list)
for engine_name, engine in engines.engines.items():
processor = get_processor(engine, engine_name)
diff --git a/searx/search/processors/abstract.py b/searx/search/processors/abstract.py
index 854f6df6a..38811d87c 100644
--- a/searx/search/processors/abstract.py
+++ b/searx/search/processors/abstract.py
@@ -1,4 +1,9 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+
+"""Abstract base classes for engine request processors.
+
+"""
import threading
from abc import abstractmethod, ABC
@@ -10,12 +15,13 @@ from searx.network import get_time_for_thread, get_network
from searx.metrics import histogram_observe, counter_inc, count_exception, count_error
from searx.exceptions import SearxEngineAccessDeniedException
-
logger = logger.getChild('searx.search.processor')
SUSPENDED_STATUS = {}
+# pylint: disable=missing-function-docstring
class SuspendedStatus:
+ """Class to handle suspend state."""
__slots__ = 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock'
@@ -49,6 +55,7 @@ class SuspendedStatus:
class EngineProcessor(ABC):
+    """Base class used for all types of request processors."""
__slots__ = 'engine', 'engine_name', 'lock', 'suspended_status'
@@ -59,22 +66,28 @@ class EngineProcessor(ABC):
key = id(key) if key else self.engine_name
self.suspended_status = SUSPENDED_STATUS.setdefault(key, SuspendedStatus())
- def handle_exception(self, result_container, reason, exception, suspend=False, display_exception=True):
+ def handle_exception(self, result_container, exception_or_message, suspend=False):
# update result_container
- error_message = str(exception) if display_exception and exception else None
- result_container.add_unresponsive_engine(self.engine_name, reason, error_message)
+ if isinstance(exception_or_message, BaseException):
+ exception_class = exception_or_message.__class__
+ module_name = getattr(exception_class, '__module__', 'builtins')
+ module_name = '' if module_name == 'builtins' else module_name + '.'
+ error_message = module_name + exception_class.__qualname__
+ else:
+ error_message = exception_or_message
+ result_container.add_unresponsive_engine(self.engine_name, error_message)
# metrics
counter_inc('engine', self.engine_name, 'search', 'count', 'error')
- if exception:
- count_exception(self.engine_name, exception)
+ if isinstance(exception_or_message, BaseException):
+ count_exception(self.engine_name, exception_or_message)
else:
- count_error(self.engine_name, reason)
+ count_error(self.engine_name, exception_or_message)
# suspend the engine ?
if suspend:
suspended_time = None
- if isinstance(exception, SearxEngineAccessDeniedException):
- suspended_time = exception.suspended_time
- self.suspended_status.suspend(suspended_time, reason) # pylint: disable=no-member
+ if isinstance(exception_or_message, SearxEngineAccessDeniedException):
+ suspended_time = exception_or_message.suspended_time
+ self.suspended_status.suspend(suspended_time, error_message) # pylint: disable=no-member
def _extend_container_basic(self, result_container, start_time, search_results):
# update result_container
@@ -91,7 +104,7 @@ class EngineProcessor(ABC):
def extend_container(self, result_container, start_time, search_results):
if getattr(threading.current_thread(), '_timeout', False):
# the main thread is not waiting anymore
- self.handle_exception(result_container, 'Timeout', None)
+ self.handle_exception(result_container, 'timeout', None)
else:
# check if the engine accepted the request
if search_results is not None:
@@ -137,9 +150,7 @@ class EngineProcessor(ABC):
if tests is None:
tests = getattr(self.engine, 'additional_tests', {})
tests.update(self.get_default_tests())
- return tests
- else:
- return tests
+ return tests
- def get_default_tests(self):
+ def get_default_tests(self): # pylint: disable=no-self-use
return {}
diff --git a/searx/search/processors/offline.py b/searx/search/processors/offline.py
index 5186b346a..f40626f39 100644
--- a/searx/search/processors/offline.py
+++ b/searx/search/processors/offline.py
@@ -1,13 +1,17 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
-from searx import logger
-from searx.search.processors.abstract import EngineProcessor
+"""Processors for engine-type: ``offline``
+"""
-logger = logger.getChild('searx.search.processor.offline')
+from searx import logger
+from .abstract import EngineProcessor
+logger = logger.getChild('searx.search.processor.offline')
class OfflineProcessor(EngineProcessor):
+ """Processor class used by ``offline`` engines"""
engine_type = 'offline'
@@ -21,6 +25,6 @@ class OfflineProcessor(EngineProcessor):
except ValueError as e:
# do not record the error
logger.exception('engine {0} : invalid input : {1}'.format(self.engine_name, e))
- except Exception as e:
- self.handle_exception(result_container, 'unexpected crash', e)
+ except Exception as e: # pylint: disable=broad-except
+ self.handle_exception(result_container, e)
logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py
index c39937023..93a9c6cbf 100644
--- a/searx/search/processors/online.py
+++ b/searx/search/processors/online.py
@@ -1,24 +1,29 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+
+"""Processors for engine-type: ``online``
+
+"""
from time import time
import asyncio
-
import httpx
import searx.network
from searx import logger
from searx.utils import gen_useragent
-from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException,
- SearxEngineTooManyRequestsException,)
+from searx.exceptions import (
+ SearxEngineAccessDeniedException,
+ SearxEngineCaptchaException,
+ SearxEngineTooManyRequestsException,
+)
from searx.metrics.error_recorder import count_error
-
-from searx.search.processors.abstract import EngineProcessor
-
+from .abstract import EngineProcessor
logger = logger.getChild('searx.search.processor.online')
-
def default_request_params():
+ """Default request parameters for ``online`` engines."""
return {
'method': 'GET',
'headers': {},
@@ -31,6 +36,7 @@ def default_request_params():
class OnlineProcessor(EngineProcessor):
+ """Processor class for ``online`` engines."""
engine_type = 'online'
@@ -130,7 +136,7 @@ class OnlineProcessor(EngineProcessor):
self.extend_container(result_container, start_time, search_results)
except (httpx.TimeoutException, asyncio.TimeoutError) as e:
# requests timeout (connect or read)
- self.handle_exception(result_container, 'HTTP timeout', e, suspend=True, display_exception=False)
+ self.handle_exception(result_container, e, suspend=True)
logger.error("engine {0} : HTTP requests timeout"
"(search duration : {1} s, timeout: {2} s) : {3}"
.format(self.engine_name, time() - start_time,
@@ -138,23 +144,23 @@ class OnlineProcessor(EngineProcessor):
e.__class__.__name__))
except (httpx.HTTPError, httpx.StreamError) as e:
# other requests exception
- self.handle_exception(result_container, 'HTTP error', e, suspend=True, display_exception=False)
+ self.handle_exception(result_container, e, suspend=True)
logger.exception("engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}"
.format(self.engine_name, time() - start_time,
timeout_limit,
e))
except SearxEngineCaptchaException as e:
- self.handle_exception(result_container, 'CAPTCHA required', e, suspend=True, display_exception=False)
+ self.handle_exception(result_container, e, suspend=True)
logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))
except SearxEngineTooManyRequestsException as e:
- self.handle_exception(result_container, 'too many requests', e, suspend=True, display_exception=False)
+ self.handle_exception(result_container, e, suspend=True)
logger.exception('engine {0} : Too many requests'.format(self.engine_name))
except SearxEngineAccessDeniedException as e:
- self.handle_exception(result_container, 'blocked', e, suspend=True, display_exception=False)
+ self.handle_exception(result_container, e, suspend=True)
logger.exception('engine {0} : Searx is blocked'.format(self.engine_name))
- except Exception as e:
- self.handle_exception(result_container, 'unexpected crash', e, display_exception=False)
+ except Exception as e: # pylint: disable=broad-except
+ self.handle_exception(result_container, e)
logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
def get_default_tests(self):
diff --git a/searx/search/processors/online_currency.py b/searx/search/processors/online_currency.py
index 0dc3f3b6a..4f642fa72 100644
--- a/searx/search/processors/online_currency.py
+++ b/searx/search/processors/online_currency.py
@@ -1,4 +1,8 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+"""Processors for engine-type: ``online_currency``
+
+"""
import unicodedata
import re
@@ -6,32 +10,31 @@ import re
from searx.data import CURRENCIES
from .online import OnlineProcessor
-
parser_re = re.compile('.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)
+# pylint: disable=missing-function-docstring
def normalize_name(name):
name = name.lower().replace('-', ' ').rstrip('s')
name = re.sub(' +', ' ', name)
return unicodedata.normalize('NFKD', name).lower()
-
def name_to_iso4217(name):
- global CURRENCIES
+ global CURRENCIES # pylint: disable=global-statement
name = normalize_name(name)
currency = CURRENCIES['names'].get(name, [name])
if isinstance(currency, str):
return currency
return currency[0]
-
def iso4217_to_name(iso4217, language):
- global CURRENCIES
+ global CURRENCIES # pylint: disable=global-statement
return CURRENCIES['iso4217'].get(iso4217, {}).get(language, iso4217)
-
class OnlineCurrencyProcessor(OnlineProcessor):
+ """Processor class used by ``online_currency`` engines."""
+
engine_type = 'online_currency'
def get_params(self, search_query, engine_category):
diff --git a/searx/search/processors/online_dictionary.py b/searx/search/processors/online_dictionary.py
index 987c710a1..11ca0335d 100644
--- a/searx/search/processors/online_dictionary.py
+++ b/searx/search/processors/online_dictionary.py
@@ -1,15 +1,18 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+"""Processors for engine-type: ``online_dictionary``
+
+"""
import re
from searx.utils import is_valid_lang
from .online import OnlineProcessor
-
parser_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
-
class OnlineDictionaryProcessor(OnlineProcessor):
+ """Processor class used by ``online_dictionnary`` engines."""
engine_type = 'online_dictionnary'
diff --git a/searx/webapp.py b/searx/webapp.py
index 70d2d662b..b8bc60ec5 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -172,28 +172,34 @@ _category_names = (gettext('files'),
gettext('science'))
#
-exception_classname_to_label = {
- "searx.exceptions.SearxEngineCaptchaException": gettext("CAPTCHA"),
- "searx.exceptions.SearxEngineTooManyRequestsException": gettext("too many requests"),
- "searx.exceptions.SearxEngineAccessDeniedException": gettext("access denied"),
- "searx.exceptions.SearxEngineAPIException": gettext("server API error"),
- "httpx.TimeoutException": gettext("HTTP timeout"),
- "httpx.ConnectTimeout": gettext("HTTP timeout"),
- "httpx.ReadTimeout": gettext("HTTP timeout"),
- "httpx.WriteTimeout": gettext("HTTP timeout"),
- "httpx.HTTPStatusError": gettext("HTTP error"),
- "httpx.ConnectError": gettext("HTTP connection error"),
- "httpx.RemoteProtocolError": gettext("HTTP protocol error"),
- "httpx.LocalProtocolError": gettext("HTTP protocol error"),
- "httpx.ProtocolError": gettext("HTTP protocol error"),
- "httpx.ReadError": gettext("network error"),
- "httpx.WriteError": gettext("network error"),
- "httpx.ProxyError": gettext("proxy error"),
- "searx.exceptions.SearxEngineXPathException": gettext("parsing error"),
- "KeyError": gettext("parsing error"),
- "json.decoder.JSONDecodeError": gettext("parsing error"),
- "lxml.etree.ParserError": gettext("parsing error"),
- None: gettext("unexpected crash"),
+timeout_text = gettext('timeout')
+parsing_error_text = gettext('parsing error')
+http_protocol_error_text = gettext('HTTP protocol error')
+network_error_text = gettext('network error')
+exception_classname_to_text = {
+ None: gettext('unexpected crash'),
+ 'timeout': timeout_text,
+ 'asyncio.TimeoutError': timeout_text,
+ 'httpx.TimeoutException': timeout_text,
+ 'httpx.ConnectTimeout': timeout_text,
+ 'httpx.ReadTimeout': timeout_text,
+ 'httpx.WriteTimeout': timeout_text,
+ 'httpx.HTTPStatusError': gettext('HTTP error'),
+ 'httpx.ConnectError': gettext("HTTP connection error"),
+ 'httpx.RemoteProtocolError': http_protocol_error_text,
+ 'httpx.LocalProtocolError': http_protocol_error_text,
+ 'httpx.ProtocolError': http_protocol_error_text,
+ 'httpx.ReadError': network_error_text,
+ 'httpx.WriteError': network_error_text,
+ 'httpx.ProxyError': gettext("proxy error"),
+ 'searx.exceptions.SearxEngineCaptchaException': gettext("CAPTCHA"),
+ 'searx.exceptions.SearxEngineTooManyRequestsException': gettext("too many requests"),
+ 'searx.exceptions.SearxEngineAccessDeniedException': gettext("access denied"),
+ 'searx.exceptions.SearxEngineAPIException': gettext("server API error"),
+ 'searx.exceptions.SearxEngineXPathException': parsing_error_text,
+ 'KeyError': parsing_error_text,
+ 'json.decoder.JSONDecodeError': parsing_error_text,
+ 'lxml.etree.ParserError': parsing_error_text,
}
_flask_babel_get_translations = flask_babel.get_translations
@@ -786,15 +792,21 @@ def search():
def __get_translated_errors(unresponsive_engines):
- translated_errors = set()
- for unresponsive_engine in unresponsive_engines:
- error_msg = gettext(unresponsive_engine[1])
+ translated_errors = []
+    # make a copy of unresponsive_engines to avoid "RuntimeError: Set changed size during iteration"
+ # it happens when an engine modifies the ResultContainer after the search_multiple_requests method
+ # has stopped waiting
+ for unresponsive_engine in list(unresponsive_engines):
+ error_user_text = exception_classname_to_text.get(unresponsive_engine[1])
+ if not error_user_text:
+ error_user_text = exception_classname_to_text[None]
+ error_msg = gettext(error_user_text)
if unresponsive_engine[2]:
error_msg = "{} {}".format(error_msg, unresponsive_engine[2])
if unresponsive_engine[3]:
error_msg = gettext('Suspended') + ': ' + error_msg
- translated_errors.add((unresponsive_engine[0], error_msg))
- return translated_errors
+ translated_errors.append((unresponsive_engine[0], error_msg))
+ return sorted(translated_errors, key=lambda e: e[0])
@app.route('/about', methods=['GET'])
@@ -944,14 +956,14 @@ def preferences():
# the first element has the highest percentage rate.
reliabilities_errors = []
for error in errors:
- error_user_message = None
+ error_user_text = None
if error.get('secondary') or 'exception_classname' not in error:
continue
- error_user_message = exception_classname_to_label.get(error.get('exception_classname'))
+ error_user_text = exception_classname_to_text.get(error.get('exception_classname'))
if not error:
- error_user_message = exception_classname_to_label[None]
- if error_user_message not in reliabilities_errors:
- reliabilities_errors.append(error_user_message)
+ error_user_text = exception_classname_to_text[None]
+ if error_user_text not in reliabilities_errors:
+ reliabilities_errors.append(error_user_text)
reliabilities[e.name]['errors'] = reliabilities_errors
# supports
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 6deafc261..000000000
--- a/tox.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[flake8]
-max-line-length = 120