Diffstat (limited to 'searx/search.py')
-rw-r--r--  searx/search.py  258
1 file changed, 182 insertions(+), 76 deletions(-)
diff --git a/searx/search.py b/searx/search.py
index 22aea3661..661add7ca 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -24,7 +24,7 @@ from flask_babel import gettext
import requests.exceptions
import searx.poolrequests as requests_lib
from searx.engines import (
-    categories, engines
+    categories, engines, settings
)
from searx.answerers import ask
from searx.utils import gen_useragent
@@ -45,18 +45,24 @@ if sys.version_info[0] == 3:
logger = logger.getChild('search')
number_of_searches = 0
+max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout')
+if max_request_timeout is None:
+    logger.info('max_request_timeout={0}'.format(max_request_timeout))
+else:
+    if isinstance(max_request_timeout, float):
+        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
+    else:
+        logger.critical('outgoing.max_request_timeout, if defined, has to be a float')
+        from sys import exit
+        exit(1)
-def send_http_request(engine, request_params, start_time, timeout_limit):
-    # for page_load_time stats
-    time_before_request = time()
-
+def send_http_request(engine, request_params):
    # create a dictionary which contains all
    # information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
-        timeout=timeout_limit,
        verify=request_params['verify']
    )
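
The new module-level check above reads `outgoing.max_request_timeout` once at import time and refuses to start unless the value is a float. A minimal standalone sketch of that lookup and validation (the `settings` dict shown here is an assumed stand-in for searx's parsed settings.yml):

```python
# Sketch only: 'settings' stands in for searx's parsed settings.yml.
settings = {'outgoing': {'max_request_timeout': 10.0}}  # hypothetical value

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout')
if max_request_timeout is not None and not isinstance(max_request_timeout, float):
    # the real code logs via logger.critical and calls exit(1)
    raise SystemExit('outgoing.max_request_timeout, if defined, has to be a float')
```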
@@ -68,98 +74,144 @@ def send_http_request(engine, request_params, start_time, timeout_limit):
        request_args['data'] = request_params['data']

    # send the request
-    response = req(request_params['url'], **request_args)
+    return req(request_params['url'], **request_args)
-    # is there a timeout (no parsing in this case)
-    timeout_overhead = 0.2  # seconds
-    time_after_request = time()
-    search_duration = time_after_request - start_time
-    if search_duration > timeout_limit + timeout_overhead:
-        raise requests.exceptions.Timeout(response=response)
-
-    with threading.RLock():
-        # no error : reset the suspend variables
-        engine.continuous_errors = 0
-        engine.suspend_end_time = 0
-        # update stats with current page-load-time
-        # only the HTTP request
-        engine.stats['page_load_time'] += time_after_request - time_before_request
-        engine.stats['page_load_count'] += 1
-    # everything is ok : return the response
-    return response
-
-
-def search_one_request(engine, query, request_params, start_time, timeout_limit):
+def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
-        return []
+        return None

    if not request_params['url']:
-        return []
+        return None

    # send request
-    response = send_http_request(engine, request_params, start_time, timeout_limit)
+    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)
+def search_one_offline_request(engine, query, request_params):
+    return engine.search(query, request_params)
+
+
def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+    if engines[engine_name].offline:
+        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
+    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
+
+
+def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
-        # send requests and parse the results
-        search_results = search_one_request(engine, query, request_params, start_time, timeout_limit)
-
-        # add results
-        result_container.extend(engine_name, search_results)
+        search_results = search_one_offline_request(engine, query, request_params)

-        # update engine time when there is no exception
-        with threading.RLock():
-            engine.stats['engine_time'] += time() - start_time
-            engine.stats['engine_time_count'] += 1
+        if search_results:
+            result_container.extend(engine_name, search_results)

-        return True
+            engine_time = time() - start_time
+            result_container.add_timing(engine_name, engine_time, engine_time)
+            with threading.RLock():
+                engine.stats['engine_time'] += engine_time
+                engine.stats['engine_time_count'] += 1

+    except ValueError as e:
+        record_offline_engine_stats_on_error(engine, result_container, start_time)
+        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
+        record_offline_engine_stats_on_error(engine, result_container, start_time)
+        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
+        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
+
+
+def record_offline_engine_stats_on_error(engine, result_container, start_time):
+    engine_time = time() - start_time
+    result_container.add_timing(engine.name, engine_time, engine_time)
+
+    with threading.RLock():
        engine.stats['errors'] += 1

-        search_duration = time() - start_time
-        requests_exception = False
+
+def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+    # set timeout for all HTTP requests
+    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
+    # reset the HTTP total time
+    requests_lib.reset_time_for_thread()
+
+    #
+    engine = engines[engine_name]
+
+    # suppose everything will be alright
+    requests_exception = False
+
+    try:
+        # send requests and parse the results
+        search_results = search_one_http_request(engine, query, request_params)
+
+        # check if the engine accepted the request
+        if search_results is not None:
+            # yes, so add results
+            result_container.extend(engine_name, search_results)
+
+            # update engine time when there is no exception
+            engine_time = time() - start_time
+            page_load_time = requests_lib.get_time_for_thread()
+            result_container.add_timing(engine_name, engine_time, page_load_time)
+            with threading.RLock():
+                engine.stats['engine_time'] += engine_time
+                engine.stats['engine_time_count'] += 1
+                # update stats with the total HTTP time
+                engine.stats['page_load_time'] += page_load_time
+                engine.stats['page_load_count'] += 1
+
+    except Exception as e:
+        # Timing
+        engine_time = time() - start_time
+        page_load_time = requests_lib.get_time_for_thread()
+        result_container.add_timing(engine_name, engine_time, page_load_time)
+
+        # Record the errors
+        with threading.RLock():
+            engine.stats['errors'] += 1
        if (issubclass(e.__class__, requests.exceptions.Timeout)):
-            result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
+            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout"
                         "(search duration : {1} s, timeout: {2} s) : {3}"
-                         .format(engine_name, search_duration, timeout_limit, e.__class__.__name__))
+                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif (issubclass(e.__class__, requests.exceptions.RequestException)):
-            result_container.add_unresponsive_engine((engine_name, gettext('request exception')))
+            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception"
                             "(search duration : {1} s, timeout: {2} s) : {3}"
-                             .format(engine_name, search_duration, timeout_limit, e))
+                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
-            result_container.add_unresponsive_engine((engine_name, gettext('unexpected crash')))
+            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

-        # update continuous_errors / suspend_end_time
+    # suspend or not the engine if there are HTTP errors
+    with threading.RLock():
        if requests_exception:
-            with threading.RLock():
-                engine.continuous_errors += 1
-                engine.suspend_end_time = time() + min(60, engine.continuous_errors)
-
-    #
-    return False
+            # update continuous_errors / suspend_end_time
+            engine.continuous_errors += 1
+            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
+                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
+        else:
+            # no HTTP error (perhaps an engine error)
+            # anyway, reset the suspend variables
+            engine.continuous_errors = 0
+            engine.suspend_end_time = 0
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
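
The suspend branch above applies a linear back-off capped by a maximum, both read from `settings['search']`. A sketch of the arithmetic with illustrative values (5 s per error and a 120 s cap are assumptions, not necessarily the shipped defaults):

```python
from time import time

# Illustrative values: searx reads these from settings['search'];
# 5 and 120 are assumptions, not verified defaults.
ban_time_on_fail = 5
max_ban_time_on_fail = 120

def suspend_end_time(continuous_errors):
    # linear back-off: n consecutive errors suspend the engine for
    # n * ban_time_on_fail seconds, capped at max_ban_time_on_fail
    return time() + min(max_ban_time_on_fail, continuous_errors * ban_time_on_fail)

# 3 consecutive errors  -> suspended for 15 s
# 30 consecutive errors -> capped at 120 s
```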
@@ -179,7 +231,7 @@ def search_multiple_requests(requests, result_container, start_time, timeout_limit):
        remaining_time = max(0.0, timeout_limit - (time() - start_time))
        th.join(remaining_time)
        if th.isAlive():
-            result_container.add_unresponsive_engine((th._engine_name, gettext('timeout')))
+            result_container.add_unresponsive_engine(th._engine_name, 'timeout')
            logger.warning('engine timeout: {0}'.format(th._engine_name))
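
The join loop in this hunk gives every engine thread one shared absolute deadline: each `join()` waits only for whatever is left of the budget, so slow threads cannot extend the total search time. A self-contained sketch of the pattern (`is_alive()` is the Python 3 spelling of the `isAlive()` seen above):

```python
import threading
from time import time, sleep

def worker(seconds):
    sleep(seconds)

start_time = time()
timeout_limit = 1.0  # shared budget for all threads

threads = [threading.Thread(target=worker, args=(s,)) for s in (0.2, 0.5, 2.0)]
for th in threads:
    th.start()
for th in threads:
    # wait only for the time remaining in the overall budget
    remaining_time = max(0.0, timeout_limit - (time() - start_time))
    th.join(remaining_time)
    if th.is_alive():
        print('timed out:', th.name)  # the 2.0 s worker exceeds the budget
```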
@@ -195,6 +247,13 @@ def default_request_params():
    }

+# remove duplicate queries.
+# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
+def deduplicate_query_engines(query_engines):
+    uniq_query_engines = {q["category"] + '|' + q["name"]: q for q in query_engines}
+    return uniq_query_engines.values()
+
+
def get_search_query_from_webapp(preferences, form):
    # no text for the query ?
    if not form.get('q'):
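
For illustration, the dict comprehension in the `deduplicate_query_engines` helper above keeps one entry per `category|name` key, so duplicates collapse like this:

```python
query_engines = [
    {'category': 'general', 'name': 'wikipedia'},
    {'category': 'general', 'name': 'wikipedia'},  # duplicate, overwrites same key
    {'category': 'music', 'name': 'soundcloud'},
]

uniq = {q["category"] + '|' + q["name"]: q for q in query_engines}
print(list(uniq.values()))
# [{'category': 'general', 'name': 'wikipedia'}, {'category': 'music', 'name': 'soundcloud'}]
```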
@@ -255,14 +314,31 @@ def get_search_query_from_webapp(preferences, form):
    # query_engines
    query_engines = raw_text_query.engines

+    # timeout_limit
+    query_timeout = raw_text_query.timeout_limit
+    if query_timeout is None and 'timeout_limit' in form:
+        raw_time_limit = form.get('timeout_limit')
+        if raw_time_limit in ['None', '']:
+            raw_time_limit = None
+        else:
+            try:
+                query_timeout = float(raw_time_limit)
+            except ValueError:
+                raise SearxParameterException('timeout_limit', raw_time_limit)
+
    # query_categories
    query_categories = []

    # if engines are calculated from query,
    # set categories by using that information
    if query_engines and raw_text_query.specific:
-        query_categories = list(set(engine['category']
-                                    for engine in query_engines))
+        additional_categories = set()
+        for engine in query_engines:
+            if 'from_bang' in engine and engine['from_bang']:
+                additional_categories.add('none')
+            else:
+                additional_categories.add(engine['category'])
+        query_categories = list(additional_categories)

    # otherwise, using defined categories to
    # calculate which engines should be used
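
The `timeout_limit` form handling above treats the literal strings 'None' and '' as "no user timeout"; anything else must parse as a float or the query is rejected. A standalone sketch of the same rules (a plain `ValueError` stands in for `SearxParameterException`):

```python
def parse_timeout(form):
    raw_time_limit = form.get('timeout_limit')
    if raw_time_limit in ['None', '', None]:
        return None
    try:
        return float(raw_time_limit)
    except ValueError:
        # searx raises SearxParameterException here
        raise ValueError('invalid timeout_limit: {0}'.format(raw_time_limit))

assert parse_timeout({'timeout_limit': '3.0'}) == 3.0
assert parse_timeout({'timeout_limit': 'None'}) is None
assert parse_timeout({}) is None
```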
@@ -320,8 +396,12 @@ def get_search_query_from_webapp(preferences, form):
                             for engine in categories[categ]
                             if (engine.name, categ) not in disabled_engines)

-    return SearchQuery(query, query_engines, query_categories,
-                       query_lang, query_safesearch, query_pageno, query_time_range)
+    query_engines = deduplicate_query_engines(query_engines)
+
+    return (SearchQuery(query, query_engines, query_categories,
+                        query_lang, query_safesearch, query_pageno,
+                        query_time_range, query_timeout, preferences),
+            raw_text_query)
class Search(object):
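
Note the changed return type: callers now get a `(SearchQuery, raw_text_query)` pair instead of a bare `SearchQuery`, so call sites (presumably in `webapp.py`) have to unpack it:

```python
# Hypothetical call site: unpack the pair instead of taking a single value
search_query, raw_text_query = get_search_query_from_webapp(preferences, form)
```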
@@ -333,6 +413,7 @@ class Search(object):
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
+        self.actual_timeout = None

    # do search-request
    def search(self):
@@ -362,7 +443,7 @@ class Search(object):
        search_query = self.search_query

        # max of all selected engine timeouts
-        timeout_limit = 0
+        default_timeout = 0

        # start search-request for all selected engines
        for selected_engine in search_query.engines:
@@ -371,6 +452,9 @@ class Search(object):
            engine = engines[selected_engine['name']]

+            if not search_query.preferences.validate_token(engine):
+                continue
+
            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
@@ -385,29 +469,51 @@ class Search(object):
                continue

            # set default request parameters
-            request_params = default_request_params()
-            request_params['headers']['User-Agent'] = user_agent
-            request_params['category'] = selected_engine['category']
-            request_params['pageno'] = search_query.pageno
+            request_params = {}
+            if not engine.offline:
+                request_params = default_request_params()
+                request_params['headers']['User-Agent'] = user_agent

-            if hasattr(engine, 'language') and engine.language:
-                request_params['language'] = engine.language
-            else:
-                request_params['language'] = search_query.lang
+                if hasattr(engine, 'language') and engine.language:
+                    request_params['language'] = engine.language
+                else:
+                    request_params['language'] = search_query.lang
+
+                request_params['safesearch'] = search_query.safesearch
+                request_params['time_range'] = search_query.time_range

-            # 0 = None, 1 = Moderate, 2 = Strict
-            request_params['safesearch'] = search_query.safesearch
-            request_params['time_range'] = search_query.time_range
+            request_params['category'] = selected_engine['category']
+            request_params['pageno'] = search_query.pageno

            # append request to list
            requests.append((selected_engine['name'], search_query.query, request_params))

-            # update timeout_limit
-            timeout_limit = max(timeout_limit, engine.timeout)
-
+            # update default_timeout
+            default_timeout = max(default_timeout, engine.timeout)
+
+        # adjust timeout
+        self.actual_timeout = default_timeout
+        query_timeout = self.search_query.timeout_limit
+
+        if max_request_timeout is None and query_timeout is None:
+            # no max, no user timeout: keep default_timeout
+            pass
+        elif max_request_timeout is None and query_timeout is not None:
+            # no max, but a user timeout: use it unless it exceeds the default
+            self.actual_timeout = min(default_timeout, query_timeout)
+        elif max_request_timeout is not None and query_timeout is None:
+            # max, no user timeout: use the default unless it exceeds the max
+            self.actual_timeout = min(default_timeout, max_request_timeout)
+        elif max_request_timeout is not None and query_timeout is not None:
+            # max and a user timeout: use the user timeout unless it exceeds the max
+            self.actual_timeout = min(query_timeout, max_request_timeout)
+
+        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
+                     .format(self.actual_timeout, default_timeout, query_timeout, max_request_timeout))
+
+        # send all search-request
        if requests:
-            # send all search-request
-            search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
+            search_multiple_requests(requests, self.result_container, start_time, self.actual_timeout)

        start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
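
The four timeout branches above boil down to one rule: take the user timeout when given, otherwise the per-engine default; without a configured max the result can never exceed the engine default, and with one it may go up to `max_request_timeout`. A worked sketch:

```python
def resolve_timeout(default_timeout, query_timeout, max_request_timeout):
    # mirrors the four branches in Search.search() above
    if max_request_timeout is None and query_timeout is None:
        return default_timeout
    if max_request_timeout is None:
        return min(default_timeout, query_timeout)
    if query_timeout is None:
        return min(default_timeout, max_request_timeout)
    return min(query_timeout, max_request_timeout)

assert resolve_timeout(3.0, None, None) == 3.0   # engine default wins
assert resolve_timeout(3.0, 1.0, None) == 1.0    # user can lower it
assert resolve_timeout(3.0, 9.0, None) == 3.0    # ...but not raise it without a max
assert resolve_timeout(3.0, 9.0, 15.0) == 9.0    # with a max, user may raise it up to the max
assert resolve_timeout(3.0, 20.0, 15.0) == 15.0  # capped at max_request_timeout
```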