diff options
Diffstat (limited to 'searx/search')
-rw-r--r-- | searx/search/checker/impl.py | 6
-rw-r--r-- | searx/search/processors/online.py | 22
2 files changed, 12 insertions, 16 deletions
diff --git a/searx/search/checker/impl.py b/searx/search/checker/impl.py index b5fb38a99..e54b3f68d 100644 --- a/searx/search/checker/impl.py +++ b/searx/search/checker/impl.py @@ -13,7 +13,7 @@ from langdetect import detect_langs from langdetect.lang_detect_exception import LangDetectException import httpx -from searx import poolrequests, logger +from searx import network, logger from searx.results import ResultContainer from searx.search.models import SearchQuery, EngineRef from searx.search.processors import EngineProcessor @@ -75,8 +75,8 @@ def _is_url_image(image_url): while retry > 0: a = time() try: - poolrequests.set_timeout_for_thread(10.0, time()) - r = poolrequests.get(image_url, timeout=10.0, allow_redirects=True, headers={ + network.set_timeout_for_thread(10.0, time()) + r = network.get(image_url, timeout=10.0, allow_redirects=True, headers={ 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Accept-Language': 'en-US;q=0.5,en;q=0.3', diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py index 24d8f53e2..66719ea9b 100644 --- a/searx/search/processors/online.py +++ b/searx/search/processors/online.py @@ -6,7 +6,7 @@ import asyncio import httpx -import searx.poolrequests as poolrequests +import searx.network from searx.engines import settings from searx import logger from searx.utils import gen_useragent @@ -64,10 +64,6 @@ class OnlineProcessor(EngineProcessor): auth=params['auth'] ) - # setting engine based proxies - if hasattr(self.engine, 'proxies'): - request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies) - # max_redirects max_redirects = params.get('max_redirects') if max_redirects: @@ -85,9 +81,9 @@ class OnlineProcessor(EngineProcessor): # specific type of request (GET or POST) if params['method'] == 'GET': - req = poolrequests.get + req = searx.network.get else: - req = poolrequests.post + req = searx.network.post request_args['data'] = params['data'] @@ -128,11 +124,11 @@ class OnlineProcessor(EngineProcessor): def search(self, query, params, result_container, start_time, timeout_limit): # set timeout for all HTTP requests - poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time) + searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time) # reset the HTTP total time - poolrequests.reset_time_for_thread() - # enable HTTP only if explicitly enabled - poolrequests.set_enable_http_protocol(self.engine.enable_http) + searx.network.reset_time_for_thread() + # set the network + searx.network.set_context_network_name(self.engine_name) # suppose everything will be alright http_exception = False @@ -149,7 +145,7 @@ class OnlineProcessor(EngineProcessor): # update engine time when there is no exception engine_time = time() - start_time - page_load_time = poolrequests.get_time_for_thread() + page_load_time = searx.network.get_time_for_thread() result_container.add_timing(self.engine_name, engine_time, page_load_time) with threading.RLock(): self.engine.stats['engine_time'] += engine_time @@ -162,7 +158,7 @@ class OnlineProcessor(EngineProcessor): # Timing engine_time = time() - start_time - page_load_time = poolrequests.get_time_for_thread() + page_load_time = searx.network.get_time_for_thread() result_container.add_timing(self.engine_name, engine_time, page_load_time) # Record the errors