summary refs log tree commit diff
path: root/searx
diff options
context:
space:
mode:
Diffstat (limited to 'searx')
-rw-r--r--  searx/engines/google_images.py    5
-rw-r--r--  searx/engines/google_news.py      5
-rw-r--r--  searx/engines/google_scholar.py  10
-rw-r--r--  searx/engines/google_videos.py    5
4 files changed, 9 insertions, 16 deletions
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index 0f8f1c73e..e7382a6fe 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -103,6 +103,8 @@ def request(query, params):
# pylint: disable=undefined-variable
params, supported_languages, language_aliases, False
)
+ logger.debug(
+ "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
'q': query,
@@ -117,11 +119,8 @@ def request(query, params):
query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
if params['safesearch']:
query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
-
- logger.debug("query_url --> %s", query_url)
params['url'] = query_url
- logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 71b6093d3..485d602bc 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -85,6 +85,8 @@ def request(query, params):
# pylint: disable=undefined-variable
params, supported_languages, language_aliases, False
)
+ logger.debug(
+ "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
# google news has only one domain
lang_info['subdomain'] = 'news.google.com'
@@ -107,11 +109,8 @@ def request(query, params):
'oe': "utf8",
'gl': lang_info['country'],
}) + ('&ceid=%s' % ceid) # ceid includes a ':' character which must not be urlencoded
-
- logger.debug("query_url --> %s", query_url)
params['url'] = query_url
- logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
diff --git a/searx/engines/google_scholar.py b/searx/engines/google_scholar.py
index fbf443e2e..8442a7bfa 100644
--- a/searx/engines/google_scholar.py
+++ b/searx/engines/google_scholar.py
@@ -77,12 +77,11 @@ def request(query, params):
offset = (params['pageno'] - 1) * 10
lang_info = get_lang_info(
# pylint: disable=undefined-variable
-
-
- # params, {}, language_aliases
-
params, supported_languages, language_aliases, False
)
+ logger.debug(
+ "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
+
# subdomain is: scholar.google.xy
lang_info['subdomain'] = lang_info['subdomain'].replace("www.", "scholar.")
@@ -95,11 +94,8 @@ def request(query, params):
})
query_url += time_range_url(params)
-
- logger.debug("query_url --> %s", query_url)
params['url'] = query_url
- logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
diff --git a/searx/engines/google_videos.py b/searx/engines/google_videos.py
index 81e0a8473..c57db4e63 100644
--- a/searx/engines/google_videos.py
+++ b/searx/engines/google_videos.py
@@ -121,6 +121,8 @@ def request(query, params):
# pylint: disable=undefined-variable
params, supported_languages, language_aliases, False
)
+ logger.debug(
+ "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
'q': query,
@@ -134,11 +136,8 @@ def request(query, params):
query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
if params['safesearch']:
query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
-
- logger.debug("query_url --> %s", query_url)
params['url'] = query_url
- logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'