author    Alexandre Flament <alex@al-f.net>  2020-12-17 11:49:43 +0100
committer GitHub <noreply@github.com>        2020-12-17 11:49:43 +0100
commit    9b27935f71ea94ba034d73c09c1f18df05fd33b6 (patch)
tree      c1e7b116220d72f9e541ff4e7e5b108dd8a2f2aa /searx/search
parent    13a2b1a44d0e216d3750519239fab2c0abb142e4 (diff)
parent    02fc4147ce745325ff25146a8085a915a5d3cacd (diff)
Merge pull request #2225 from dalf/processors
Processors
Diffstat (limited to 'searx/search')
-rw-r--r--  searx/search/__init__.py                      265
-rw-r--r--  searx/search/processors/__init__.py            41
-rw-r--r--  searx/search/processors/abstract.py            39
-rw-r--r--  searx/search/processors/offline.py             51
-rw-r--r--  searx/search/processors/online.py             211
-rw-r--r--  searx/search/processors/online_currency.py     57
-rw-r--r--  searx/search/processors/online_dictionary.py   37
7 files changed, 701 insertions, 0 deletions
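
The new searx.search module exposes EngineRef, SearchQuery and Search (see
the diff below). A minimal sketch of how the pieces fit together -- the
engine name and category here are illustrative, not part of this commit:

    from searx.search import initialize, EngineRef, Search, SearchQuery

    initialize()  # build one processor per engine from settings['engines']

    search_query = SearchQuery(
        query='test',
        engineref_list=[EngineRef('wikipedia', 'general')],  # hypothetical engine
        categories=['general'],
        lang='en-US',
        safesearch=0,
        pageno=1,
        time_range=None,
    )
    result_container = Search(search_query).search()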
diff --git a/searx/search/__init__.py b/searx/search/__init__.py
new file mode 100644
index 000000000..4511463f3
--- /dev/null
+++ b/searx/search/__init__.py
@@ -0,0 +1,265 @@
+'''
+searx is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+searx is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with searx. If not, see <http://www.gnu.org/licenses/>.
+
+(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
+'''
+
+import typing
+import gc
+import threading
+from time import time
+from uuid import uuid4
+from _thread import start_new_thread
+
+from searx import settings
+from searx.answerers import ask
+from searx.external_bang import get_bang_url
+from searx.results import ResultContainer
+from searx import logger
+from searx.plugins import plugins
+from searx.search.processors import processors, initialize as initialize_processors
+
+
+logger = logger.getChild('search')
+
+max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout', None)
+if max_request_timeout is None:
+ logger.info('max_request_timeout={0}'.format(max_request_timeout))
+else:
+ if isinstance(max_request_timeout, float):
+ logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
+ else:
+        logger.critical('outgoing.max_request_timeout must be a float if it is defined')
+ import sys
+ sys.exit(1)
+
+
+def initialize(settings_engines=None):
+ settings_engines = settings_engines or settings['engines']
+ initialize_processors(settings_engines)
+
+
+class EngineRef:
+
+ __slots__ = 'name', 'category', 'from_bang'
+
+    def __init__(self, name: str, category: str, from_bang: bool = False):
+ self.name = name
+ self.category = category
+ self.from_bang = from_bang
+
+ def __repr__(self):
+ return "EngineRef({!r}, {!r}, {!r})".format(self.name, self.category, self.from_bang)
+
+ def __eq__(self, other):
+ return self.name == other.name and self.category == other.category and self.from_bang == other.from_bang
+
+
+class SearchQuery:
+ """container for all the search parameters (query, language, etc...)"""
+
+ __slots__ = 'query', 'engineref_list', 'categories', 'lang', 'safesearch', 'pageno', 'time_range',\
+ 'timeout_limit', 'external_bang'
+
+ def __init__(self,
+ query: str,
+ engineref_list: typing.List[EngineRef],
+ categories: typing.List[str],
+ lang: str,
+ safesearch: int,
+ pageno: int,
+ time_range: typing.Optional[str],
+                 timeout_limit: typing.Optional[float] = None,
+                 external_bang: typing.Optional[str] = None):
+ self.query = query
+ self.engineref_list = engineref_list
+ self.categories = categories
+ self.lang = lang
+ self.safesearch = safesearch
+ self.pageno = pageno
+ self.time_range = time_range
+ self.timeout_limit = timeout_limit
+ self.external_bang = external_bang
+
+ def __repr__(self):
+ return "SearchQuery({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\
+ format(self.query, self.engineref_list, self.categories, self.lang, self.safesearch,
+ self.pageno, self.time_range, self.timeout_limit, self.external_bang)
+
+ def __eq__(self, other):
+ return self.query == other.query\
+ and self.engineref_list == other.engineref_list\
+            and self.categories == other.categories\
+ and self.lang == other.lang\
+ and self.safesearch == other.safesearch\
+ and self.pageno == other.pageno\
+ and self.time_range == other.time_range\
+ and self.timeout_limit == other.timeout_limit\
+ and self.external_bang == other.external_bang
+
+
+class Search:
+ """Search information container"""
+
+ __slots__ = "search_query", "result_container", "start_time", "actual_timeout"
+
+ def __init__(self, search_query):
+ # init vars
+ super().__init__()
+ self.search_query = search_query
+ self.result_container = ResultContainer()
+ self.start_time = None
+ self.actual_timeout = None
+
+ def search_external_bang(self):
+ """
+        Check if there is an external bang.
+ If yes, update self.result_container and return True
+ """
+ if self.search_query.external_bang:
+ self.result_container.redirect_url = get_bang_url(self.search_query)
+
+ # This means there was a valid bang and the
+ # rest of the search does not need to be continued
+ if isinstance(self.result_container.redirect_url, str):
+ return True
+ return False
+
+ def search_answerers(self):
+ """
+        Check if an answerer returns a result.
+ If yes, update self.result_container and return True
+ """
+ answerers_results = ask(self.search_query)
+
+ if answerers_results:
+ for results in answerers_results:
+ self.result_container.extend('answer', results)
+ return True
+ return False
+
+ # do search-request
+ def _get_requests(self):
+ # init vars
+ requests = []
+
+ # max of all selected engine timeout
+ default_timeout = 0
+
+        # start the search request for all selected engines
+ for engineref in self.search_query.engineref_list:
+ processor = processors[engineref.name]
+
+ # set default request parameters
+ request_params = processor.get_params(self.search_query, engineref.category)
+ if request_params is None:
+ continue
+
+ # append request to list
+ requests.append((engineref.name, self.search_query.query, request_params))
+
+ # update default_timeout
+ default_timeout = max(default_timeout, processor.engine.timeout)
+
+ # adjust timeout
+ actual_timeout = default_timeout
+ query_timeout = self.search_query.timeout_limit
+
+ if max_request_timeout is None and query_timeout is None:
+ # No max, no user query: default_timeout
+ pass
+ elif max_request_timeout is None and query_timeout is not None:
+ # No max, but user query: From user query except if above default
+ actual_timeout = min(default_timeout, query_timeout)
+ elif max_request_timeout is not None and query_timeout is None:
+ # Max, no user query: Default except if above max
+ actual_timeout = min(default_timeout, max_request_timeout)
+ elif max_request_timeout is not None and query_timeout is not None:
+ # Max & user query: From user query except if above max
+ actual_timeout = min(query_timeout, max_request_timeout)
+
+ logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
+ .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))
+
+ return requests, actual_timeout
+
+ def search_multiple_requests(self, requests):
+        search_id = str(uuid4())
+
+ for engine_name, query, request_params in requests:
+ th = threading.Thread(
+ target=processors[engine_name].search,
+ args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
+ name=search_id,
+ )
+ th._timeout = False
+ th._engine_name = engine_name
+ th.start()
+
+ for th in threading.enumerate():
+ if th.name == search_id:
+ remaining_time = max(0.0, self.actual_timeout - (time() - self.start_time))
+ th.join(remaining_time)
+ if th.is_alive():
+ th._timeout = True
+ self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
+ logger.warning('engine timeout: {0}'.format(th._engine_name))
+
+ def search_standard(self):
+ """
+ Update self.result_container, self.actual_timeout
+ """
+ requests, self.actual_timeout = self._get_requests()
+
+ # send all search-request
+ if requests:
+ self.search_multiple_requests(requests)
+ start_new_thread(gc.collect, tuple())
+
+        # results, suggestions, answers and infoboxes are in self.result_container
+        return True
+
+ # do search-request
+ def search(self):
+ self.start_time = time()
+
+ if not self.search_external_bang():
+ if not self.search_answerers():
+ self.search_standard()
+
+ return self.result_container
+
+
+class SearchWithPlugins(Search):
+ """Similar to the Search class but call the plugins."""
+
+ __slots__ = 'ordered_plugin_list', 'request'
+
+ def __init__(self, search_query, ordered_plugin_list, request):
+ super().__init__(search_query)
+ self.ordered_plugin_list = ordered_plugin_list
+ self.request = request
+
+ def search(self):
+ if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
+ super().search()
+
+ plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
+
+ results = self.result_container.get_ordered_results()
+
+ for result in results:
+ plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+
+ return self.result_container
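
The timeout negotiation in Search._get_requests() picks the effective timeout
from the engine default, the per-query limit and the instance-wide maximum.
A worked sketch of the four cases, with illustrative values:

    def resolve_timeout(default_timeout, query_timeout, max_request_timeout):
        # mirrors the if/elif cascade in Search._get_requests()
        if max_request_timeout is None and query_timeout is None:
            return default_timeout
        if max_request_timeout is None:
            return min(default_timeout, query_timeout)
        if query_timeout is None:
            return min(default_timeout, max_request_timeout)
        return min(query_timeout, max_request_timeout)

    # engines allow up to 3.0 s, the user asked for 5.0 s, instance cap 10.0 s
    assert resolve_timeout(3.0, 5.0, 10.0) == 5.0
    # without an instance cap, a user query can only shorten the default
    assert resolve_timeout(3.0, 5.0, None) == 3.0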
diff --git a/searx/search/processors/__init__.py b/searx/search/processors/__init__.py
new file mode 100644
index 000000000..4cae3cd0f
--- /dev/null
+++ b/searx/search/processors/__init__.py
@@ -0,0 +1,41 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+from .online import OnlineProcessor
+from .offline import OfflineProcessor
+from .online_dictionary import OnlineDictionaryProcessor
+from .online_currency import OnlineCurrencyProcessor
+from .abstract import EngineProcessor
+from searx import logger
+import searx.engines as engines
+
+
+__all__ = ['EngineProcessor', 'OfflineProcessor', 'OnlineProcessor',
+ 'OnlineDictionaryProcessor', 'OnlineCurrencyProcessor', 'processors']
+logger = logger.getChild('search.processors')
+processors = {}
+
+
+def get_processor_class(engine_type):
+ for c in [OnlineProcessor, OfflineProcessor, OnlineDictionaryProcessor, OnlineCurrencyProcessor]:
+ if c.engine_type == engine_type:
+ return c
+ return None
+
+
+def get_processor(engine, engine_name):
+ engine_type = getattr(engine, 'engine_type', 'online')
+ processor_class = get_processor_class(engine_type)
+ if processor_class:
+ return processor_class(engine, engine_name)
+ else:
+ return None
+
+
+def initialize(engine_list):
+ engines.initialize_engines(engine_list)
+ for engine_name, engine in engines.engines.items():
+ processor = get_processor(engine, engine_name)
+ if processor is None:
+            logger.error('Cannot get processor for engine %s', engine_name)
+ else:
+ processors[engine_name] = processor
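
Processor selection keys on the engine module's engine_type attribute,
defaulting to 'online'. A sketch with a stand-in engine object (hypothetical,
for illustration only):

    from searx.search.processors import get_processor
    from searx.search.processors.offline import OfflineProcessor

    class DummyEngine:  # stands in for an engine module
        engine_type = 'offline'

    processor = get_processor(DummyEngine(), 'dummy')
    assert isinstance(processor, OfflineProcessor)
    # an unknown engine_type makes get_processor() return None,
    # which initialize() logs as an error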
diff --git a/searx/search/processors/abstract.py b/searx/search/processors/abstract.py
new file mode 100644
index 000000000..cf3fd7236
--- /dev/null
+++ b/searx/search/processors/abstract.py
@@ -0,0 +1,39 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+from abc import abstractmethod
+from searx import logger
+
+
+logger = logger.getChild('search.processor')
+
+
+class EngineProcessor:
+
+ def __init__(self, engine, engine_name):
+ self.engine = engine
+ self.engine_name = engine_name
+
+ def get_params(self, search_query, engine_category):
+ # if paging is not supported, skip
+ if search_query.pageno > 1 and not self.engine.paging:
+ return None
+
+ # if time_range is not supported, skip
+ if search_query.time_range and not self.engine.time_range_support:
+ return None
+
+ params = {}
+ params['category'] = engine_category
+ params['pageno'] = search_query.pageno
+ params['safesearch'] = search_query.safesearch
+ params['time_range'] = search_query.time_range
+
+ if hasattr(self.engine, 'language') and self.engine.language:
+ params['language'] = self.engine.language
+ else:
+ params['language'] = search_query.lang
+ return params
+
+ @abstractmethod
+ def search(self, query, params, result_container, start_time, timeout_limit):
+ pass
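
get_params() returning None means "skip this engine for this query"; concrete
processors extend it and must implement search(). A hypothetical minimal
subclass, sketched to show the contract:

    class EchoProcessor(EngineProcessor):
        engine_type = 'echo'  # hypothetical engine type

        def search(self, query, params, result_container, start_time, timeout_limit):
            # params is the dict built by get_params(); the caller never
            # passes None here, because a None result skips the engine
            result_container.extend(self.engine_name, [{'answer': query}])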
diff --git a/searx/search/processors/offline.py b/searx/search/processors/offline.py
new file mode 100644
index 000000000..ede8eb5e1
--- /dev/null
+++ b/searx/search/processors/offline.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import threading
+from time import time
+from searx import logger
+from searx.metrology.error_recorder import record_exception, record_error
+from searx.search.processors.abstract import EngineProcessor
+
+
+logger = logger.getChild('search.processor.offline')
+
+# one shared lock: engine.stats is updated from several threads, and a
+# freshly created RLock per update would synchronize nothing
+stats_lock = threading.RLock()
+
+
+class OfflineProcessor(EngineProcessor):
+
+ engine_type = 'offline'
+
+ def _record_stats_on_error(self, result_container, start_time):
+ engine_time = time() - start_time
+ result_container.add_timing(self.engine_name, engine_time, engine_time)
+
+        with stats_lock:
+ self.engine.stats['errors'] += 1
+
+ def _search_basic(self, query, params):
+ return self.engine.search(query, params)
+
+ def search(self, query, params, result_container, start_time, timeout_limit):
+ try:
+ search_results = self._search_basic(query, params)
+
+ if search_results:
+ result_container.extend(self.engine_name, search_results)
+
+ engine_time = time() - start_time
+ result_container.add_timing(self.engine_name, engine_time, engine_time)
+            with stats_lock:
+ self.engine.stats['engine_time'] += engine_time
+ self.engine.stats['engine_time_count'] += 1
+
+ except ValueError as e:
+ record_exception(self.engine_name, e)
+ self._record_stats_on_error(result_container, start_time)
+ logger.exception('engine {0} : invalid input : {1}'.format(self.engine_name, e))
+ except Exception as e:
+ record_exception(self.engine_name, e)
+ self._record_stats_on_error(result_container, start_time)
+ result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash', str(e))
+ logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
+ else:
+ if getattr(threading.current_thread(), '_timeout', False):
+ record_error(self.engine_name, 'Timeout')
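
An offline engine module only needs a search(query, params) callable that
returns a list of result dicts (attributes such as paging are filled in with
defaults by searx.engines at load time). A hypothetical minimal engine:

    # hypothetical offline engine module, not part of this commit
    engine_type = 'offline'

    def search(query, params):
        # keys follow the usual searx result schema
        return [{'title': 'echo: ' + query,
                 'content': 'page {0}'.format(params['pageno'])}]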
diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py
new file mode 100644
index 000000000..17bbfef0f
--- /dev/null
+++ b/searx/search/processors/online.py
@@ -0,0 +1,211 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+from urllib.parse import urlparse
+from time import time
+import threading
+
+import requests.exceptions
+
+import searx.poolrequests as poolrequests
+from searx.engines import settings
+from searx import logger
+from searx.utils import gen_useragent
+from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException,
+ SearxEngineTooManyRequestsException,)
+from searx.metrology.error_recorder import record_exception, record_error
+
+from searx.search.processors.abstract import EngineProcessor
+
+
+logger = logger.getChild('search.processor.online')
+
+# one shared lock: engine.stats and the suspend variables are updated from
+# several threads, and a freshly created RLock per update would synchronize nothing
+stats_lock = threading.RLock()
+
+DEFAULT_PARAMS = {
+ 'method': 'GET',
+ 'headers': {},
+ 'data': {},
+ 'url': '',
+ 'cookies': {},
+ 'verify': True,
+ 'auth': None
+}
+
+
+class OnlineProcessor(EngineProcessor):
+
+ engine_type = 'online'
+
+ def get_params(self, search_query, engine_category):
+ params = super().get_params(search_query, engine_category)
+ if params is None:
+ return None
+
+ # skip suspended engines
+ if self.engine.suspend_end_time >= time():
+ logger.debug('Engine currently suspended: %s', self.engine_name)
+ return None
+
+ # add default params
+ params.update(DEFAULT_PARAMS)
+
+        # add a user agent
+ params['headers']['User-Agent'] = gen_useragent()
+
+ return params
+
+ def _send_http_request(self, params):
+        # create a dictionary containing all
+        # information about the request
+ request_args = dict(
+ headers=params['headers'],
+ cookies=params['cookies'],
+ verify=params['verify'],
+ auth=params['auth']
+ )
+
+ # setting engine based proxies
+ if hasattr(self.engine, 'proxies'):
+ request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)
+
+ # max_redirects
+ max_redirects = params.get('max_redirects')
+ if max_redirects:
+ request_args['max_redirects'] = max_redirects
+
+ # soft_max_redirects
+ soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)
+
+ # raise_for_status
+ request_args['raise_for_httperror'] = params.get('raise_for_httperror', False)
+
+ # specific type of request (GET or POST)
+ if params['method'] == 'GET':
+ req = poolrequests.get
+ else:
+ req = poolrequests.post
+
+ request_args['data'] = params['data']
+
+ # send the request
+ response = req(params['url'], **request_args)
+
+ # check soft limit of the redirect count
+ if len(response.history) > soft_max_redirects:
+ # unexpected redirect : record an error
+ # but the engine might still return valid results.
+ status_code = str(response.status_code or '')
+ reason = response.reason or ''
+ hostname = str(urlparse(response.url or '').netloc)
+ record_error(self.engine_name,
+ '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
+ (status_code, reason, hostname))
+
+ return response
+
+ def _search_basic(self, query, params):
+        # let the engine module (in the engines folder) update the
+        # request parameters for this search engine
+        self.engine.request(query, params)
+
+        # skip the engine if it set no request URL (None or empty string)
+        if not params['url']:
+            return None
+
+ # send request
+ response = self._send_http_request(params)
+
+ # parse the response
+ response.search_params = params
+ return self.engine.response(response)
+
+ def search(self, query, params, result_container, start_time, timeout_limit):
+ # set timeout for all HTTP requests
+ poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
+ # reset the HTTP total time
+ poolrequests.reset_time_for_thread()
+
+ # suppose everything will be alright
+ requests_exception = False
+ suspended_time = None
+
+ try:
+ # send requests and parse the results
+ search_results = self._search_basic(query, params)
+
+ # check if the engine accepted the request
+ if search_results is not None:
+ # yes, so add results
+ result_container.extend(self.engine_name, search_results)
+
+ # update engine time when there is no exception
+ engine_time = time() - start_time
+ page_load_time = poolrequests.get_time_for_thread()
+ result_container.add_timing(self.engine_name, engine_time, page_load_time)
+            with stats_lock:
+ self.engine.stats['engine_time'] += engine_time
+ self.engine.stats['engine_time_count'] += 1
+ # update stats with the total HTTP time
+ self.engine.stats['page_load_time'] += page_load_time
+ self.engine.stats['page_load_count'] += 1
+ except Exception as e:
+ record_exception(self.engine_name, e)
+
+ # Timing
+ engine_time = time() - start_time
+ page_load_time = poolrequests.get_time_for_thread()
+ result_container.add_timing(self.engine_name, engine_time, page_load_time)
+
+ # Record the errors
+            with stats_lock:
+ self.engine.stats['errors'] += 1
+
+            if isinstance(e, requests.exceptions.Timeout):
+                # requests timeout (connect or read)
+                result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout')
+                logger.error("engine {0} : HTTP requests timeout "
+                             "(search duration : {1} s, timeout: {2} s) : {3}"
+                             .format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__))
+                requests_exception = True
+            elif isinstance(e, requests.exceptions.RequestException):
+                # other requests exception
+                result_container.add_unresponsive_engine(self.engine_name, 'HTTP error')
+                logger.exception("engine {0} : requests exception "
+                                 "(search duration : {1} s, timeout: {2} s) : {3}"
+                                 .format(self.engine_name, engine_time, timeout_limit, e))
+                requests_exception = True
+            elif isinstance(e, SearxEngineCaptchaException):
+                result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required')
+                logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))
+                suspended_time = e.suspended_time  # pylint: disable=no-member
+            elif isinstance(e, SearxEngineTooManyRequestsException):
+                result_container.add_unresponsive_engine(self.engine_name, 'too many requests')
+                logger.exception('engine {0} : Too many requests'.format(self.engine_name))
+                suspended_time = e.suspended_time  # pylint: disable=no-member
+            elif isinstance(e, SearxEngineAccessDeniedException):
+                result_container.add_unresponsive_engine(self.engine_name, 'blocked')
+                logger.exception('engine {0} : Searx is blocked'.format(self.engine_name))
+                suspended_time = e.suspended_time  # pylint: disable=no-member
+            else:
+                # other errors
+                result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash')
+                logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
+ else:
+ if getattr(threading.current_thread(), '_timeout', False):
+ record_error(self.engine_name, 'Timeout')
+
+ # suspend the engine if there is an HTTP error
+ # or suspended_time is defined
+        with stats_lock:
+ if requests_exception or suspended_time:
+ # update continuous_errors / suspend_end_time
+ self.engine.continuous_errors += 1
+ if suspended_time is None:
+ suspended_time = min(settings['search']['max_ban_time_on_fail'],
+ self.engine.continuous_errors * settings['search']['ban_time_on_fail'])
+ self.engine.suspend_end_time = time() + suspended_time
+ else:
+ # reset the suspend variables
+ self.engine.continuous_errors = 0
+ self.engine.suspend_end_time = 0
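
An online engine module is driven through its request()/response() pair;
leaving params['url'] unset makes _search_basic() skip the engine. A
hypothetical minimal engine:

    # hypothetical online engine module, not part of this commit
    from urllib.parse import urlencode

    def request(query, params):
        params['url'] = 'https://example.org/search?' + urlencode({'q': query})
        return params

    def response(resp):
        # resp.search_params holds the params used to build the request
        return [{'url': resp.url, 'title': 'example', 'content': ''}]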
diff --git a/searx/search/processors/online_currency.py b/searx/search/processors/online_currency.py
new file mode 100644
index 000000000..f0e919c03
--- /dev/null
+++ b/searx/search/processors/online_currency.py
@@ -0,0 +1,57 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import unicodedata
+import re
+
+from searx.data import CURRENCIES
+from .online import OnlineProcessor
+
+
+parser_re = re.compile('.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)
+
+
+def normalize_name(name):
+ name = name.lower().replace('-', ' ').rstrip('s')
+ name = re.sub(' +', ' ', name)
+ return unicodedata.normalize('NFKD', name).lower()
+
+
+def name_to_iso4217(name):
+    name = normalize_name(name)
+    currency = CURRENCIES['names'].get(name, [name])
+    return currency[0]
+
+
+def iso4217_to_name(iso4217, language):
+    return CURRENCIES['iso4217'].get(iso4217, {}).get(language, iso4217)
+
+
+class OnlineCurrencyProcessor(OnlineProcessor):
+
+ engine_type = 'online_currency'
+
+ def get_params(self, search_query, engine_category):
+ params = super().get_params(search_query, engine_category)
+ if params is None:
+ return None
+
+ m = parser_re.match(search_query.query)
+ if not m:
+ return None
+
+ amount_str, from_currency, to_currency = m.groups()
+ try:
+ amount = float(amount_str)
+ except ValueError:
+ return None
+ from_currency = name_to_iso4217(from_currency.strip())
+ to_currency = name_to_iso4217(to_currency.strip())
+
+ params['amount'] = amount
+ params['from'] = from_currency
+ params['to'] = to_currency
+ params['from_name'] = iso4217_to_name(from_currency, 'en')
+ params['to_name'] = iso4217_to_name(to_currency, 'en')
+ return params
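
parser_re accepts queries such as "100 usd in eur"; for that input:

    m = parser_re.match('100 usd in eur')
    # m.groups() == ('100', 'usd', 'eur')
    # name_to_iso4217() then normalizes each name and resolves it through
    # CURRENCIES['names'], falling back to the input when it is unknown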
diff --git a/searx/search/processors/online_dictionary.py b/searx/search/processors/online_dictionary.py
new file mode 100644
index 000000000..8e9ef1620
--- /dev/null
+++ b/searx/search/processors/online_dictionary.py
@@ -0,0 +1,37 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+import re
+
+from searx.utils import is_valid_lang
+from .online import OnlineProcessor
+
+
+parser_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
+
+
+class OnlineDictionaryProcessor(OnlineProcessor):
+
+    engine_type = 'online_dictionnary'  # sic -- must match the engine_type declared by dictionary engine modules
+
+ def get_params(self, search_query, engine_category):
+ params = super().get_params(search_query, engine_category)
+ if params is None:
+ return None
+
+ m = parser_re.match(search_query.query)
+ if not m:
+ return None
+
+ from_lang, to_lang, query = m.groups()
+
+ from_lang = is_valid_lang(from_lang)
+ to_lang = is_valid_lang(to_lang)
+
+ if not from_lang or not to_lang:
+ return None
+
+ params['from_lang'] = from_lang
+ params['to_lang'] = to_lang
+ params['query'] = query
+
+ return params
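
Dictionary queries take the form "<from>-<to> <text>"; for example:

    m = parser_re.match('en-fr apple')
    # m.groups() == ('en', 'fr', 'apple')
    # is_valid_lang() then resolves each code; a falsy result skips the engine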