summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--AUTHORS.rst1
-rw-r--r--searx/autocomplete.py20
-rw-r--r--searx/engines/archlinux.py17
-rwxr-xr-xsearx/engines/base.py122
-rw-r--r--searx/engines/bing_images.py2
-rw-r--r--searx/engines/doku.py84
-rw-r--r--searx/engines/google.py7
-rw-r--r--searx/engines/google_images.py8
-rw-r--r--searx/languages.py4
-rw-r--r--searx/preferences.py271
-rw-r--r--searx/query.py10
-rw-r--r--searx/results.py4
-rw-r--r--searx/search.py22
-rw-r--r--searx/settings.yml11
-rw-r--r--searx/settings_robot.yml2
-rw-r--r--searx/templates/courgette/preferences.html2
-rw-r--r--searx/templates/default/preferences.html2
-rw-r--r--searx/templates/oscar/base.html3
-rw-r--r--searx/templates/oscar/preferences.html4
-rw-r--r--searx/templates/pix-art/preferences.html2
-rw-r--r--searx/utils.py23
-rw-r--r--searx/webapp.py227
-rw-r--r--tests/robot/test_basic.robot108
-rw-r--r--tests/unit/engines/test_archlinux.py17
-rw-r--r--tests/unit/engines/test_bing_images.py162
-rw-r--r--tests/unit/engines/test_doku.py79
-rw-r--r--tests/unit/engines/test_google_images.py6
-rw-r--r--tests/unit/test_preferences.py101
-rw-r--r--tests/unit/test_webapp.py6
29 files changed, 922 insertions, 405 deletions
diff --git a/AUTHORS.rst b/AUTHORS.rst
index c5047438a..974fbeb15 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -42,3 +42,4 @@ generally made searx better:
- Noemi Vanyi
- Kang-min Liu
- Kirill Isakov
+- Guilhem Bonnefille
diff --git a/searx/autocomplete.py b/searx/autocomplete.py
index 94913e8d8..527104041 100644
--- a/searx/autocomplete.py
+++ b/searx/autocomplete.py
@@ -110,7 +110,7 @@ def searx_bang(full_query):
return list(result_set)
-def dbpedia(query):
+def dbpedia(query, lang):
# dbpedia autocompleter, no HTTPS
autocomplete_url = 'http://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
@@ -126,7 +126,7 @@ def dbpedia(query):
return results
-def duckduckgo(query):
+def duckduckgo(query, lang):
# duckduckgo autocompleter
url = 'https://ac.duckduckgo.com/ac/?{0}&type=list'
@@ -136,11 +136,11 @@ def duckduckgo(query):
return []
-def google(query):
+def google(query, lang):
# google autocompleter
autocomplete_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'
- response = get(autocomplete_url + urlencode(dict(q=query)))
+ response = get(autocomplete_url + urlencode(dict(hl=lang, q=query)))
results = []
@@ -151,8 +151,8 @@ def google(query):
return results
-def startpage(query):
- # wikipedia autocompleter
+def startpage(query, lang):
+ # startpage autocompleter
url = 'https://startpage.com/do/suggest?{query}'
resp = get(url.format(query=urlencode({'query': query}))).text.split('\n')
@@ -161,11 +161,11 @@ def startpage(query):
return []
-def qwant(query):
+def qwant(query, lang):
# qwant autocompleter (additional parameter : lang=en_en&count=xxx )
url = 'https://api.qwant.com/api/suggest?{query}'
- resp = get(url.format(query=urlencode({'q': query})))
+ resp = get(url.format(query=urlencode({'q': query, 'lang': lang})))
results = []
@@ -178,9 +178,9 @@ def qwant(query):
return results
-def wikipedia(query):
+def wikipedia(query, lang):
# wikipedia autocompleter
- url = 'https://en.wikipedia.org/w/api.php?action=opensearch&{0}&limit=10&namespace=0&format=json'
+ url = 'https://' + lang + '.wikipedia.org/w/api.php?action=opensearch&{0}&limit=10&namespace=0&format=json'
resp = loads(get(url.format(urlencode(dict(search=query)))).text)
if len(resp) > 1:
diff --git a/searx/engines/archlinux.py b/searx/engines/archlinux.py
index f12c4328a..84e0d0fba 100644
--- a/searx/engines/archlinux.py
+++ b/searx/engines/archlinux.py
@@ -3,12 +3,12 @@
"""
Arch Linux Wiki
- @website https://wiki.archlinux.org
- @provide-api no (Mediawiki provides API, but Arch Wiki blocks access to it
- @using-api no
- @results HTML
- @stable no (HTML can change)
- @parse url, title, content
+ @website https://wiki.archlinux.org
+ @provide-api no (Mediawiki provides API, but Arch Wiki blocks access to it
+ @using-api no
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title
"""
from urlparse import urljoin
@@ -26,7 +26,6 @@ base_url = 'https://wiki.archlinux.org'
# xpath queries
xpath_results = '//ul[@class="mw-search-results"]/li'
xpath_link = './/div[@class="mw-search-result-heading"]/a'
-xpath_content = './/div[@class="searchresult"]'
# cut 'en' from 'en_US', 'de' from 'de_CH', and so on
@@ -135,10 +134,8 @@ def response(resp):
link = result.xpath(xpath_link)[0]
href = urljoin(base_url, link.attrib.get('href'))
title = escape(extract_text(link))
- content = escape(extract_text(result.xpath(xpath_content)))
results.append({'url': href,
- 'title': title,
- 'content': content})
+ 'title': title})
return results
diff --git a/searx/engines/base.py b/searx/engines/base.py
new file mode 100755
index 000000000..66491d395
--- /dev/null
+++ b/searx/engines/base.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+"""
+ BASE (Scholar publications)
+
+ @website https://base-search.net
+ @provide-api yes with authorization (https://api.base-search.net/)
+
+ @using-api yes
+ @results XML
+ @stable ?
+ @parse url, title, publishedDate, content
+ More info on api: http://base-search.net/about/download/base_interface.pdf
+"""
+
+from lxml import etree
+from urllib import urlencode
+from searx.utils import searx_useragent
+from cgi import escape
+from datetime import datetime
+import re
+
+
+categories = ['science']
+
+base_url = 'https://api.base-search.net/cgi-bin/BaseHttpSearchInterface.fcgi'\
+ + '?func=PerformSearch&{query}&boost=oa&hits={hits}&offset={offset}'
+
+# engine dependent config
+paging = True
+number_of_results = 10
+
+# shortcuts for advanced search
+shorcut_dict = {
+ # user-friendly keywords
+ 'format:': 'dcformat:',
+ 'author:': 'dccreator:',
+ 'collection:': 'dccollection:',
+ 'hdate:': 'dchdate:',
+ 'contributor:': 'dccontributor:',
+ 'coverage:': 'dccoverage:',
+ 'date:': 'dcdate:',
+ 'abstract:': 'dcdescription:',
+ 'urls:': 'dcidentifier:',
+ 'language:': 'dclanguage:',
+ 'publisher:': 'dcpublisher:',
+ 'relation:': 'dcrelation:',
+ 'rights:': 'dcrights:',
+ 'source:': 'dcsource:',
+ 'subject:': 'dcsubject:',
+ 'title:': 'dctitle:',
+ 'type:': 'dcdctype:'
+}
+
+
+def request(query, params):
+ # replace shortcuts with API advanced search keywords
+ for key in shorcut_dict.keys():
+ query = re.sub(str(key), str(shorcut_dict[key]), query)
+
+ # basic search
+ offset = (params['pageno'] - 1) * number_of_results
+
+ string_args = dict(query=urlencode({'query': query}),
+ offset=offset,
+ hits=number_of_results)
+
+ params['url'] = base_url.format(**string_args)
+
+ params['headers']['User-Agent'] = searx_useragent()
+ return params
+
+
+def response(resp):
+ results = []
+
+ search_results = etree.XML(resp.content)
+
+ for entry in search_results.xpath('./result/doc'):
+ content = "No description available"
+
+ date = datetime.now() # needed in case no dcdate is available for an item
+ for item in entry:
+ if item.attrib["name"] == "dchdate":
+ harvestDate = item.text
+
+ elif item.attrib["name"] == "dcdate":
+ date = item.text
+
+ elif item.attrib["name"] == "dctitle":
+ title = item.text
+
+ elif item.attrib["name"] == "dclink":
+ url = item.text
+
+ elif item.attrib["name"] == "dcdescription":
+ content = escape(item.text[:300])
+ if len(item.text) > 300:
+ content += "..."
+
+# dates returned by the BASE API can be in several formats
+ publishedDate = None
+ for date_format in ['%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%d', '%Y-%m', '%Y']:
+ try:
+ publishedDate = datetime.strptime(date, date_format)
+ break
+ except:
+ pass
+
+ if publishedDate is not None:
+ res_dict = {'url': url,
+ 'title': title,
+ 'publishedDate': publishedDate,
+ 'content': content}
+ else:
+ res_dict = {'url': url,
+ 'title': title,
+ 'content': content}
+
+ results.append(res_dict)
+
+ return results
diff --git a/searx/engines/bing_images.py b/searx/engines/bing_images.py
index 2664b795f..384520392 100644
--- a/searx/engines/bing_images.py
+++ b/searx/engines/bing_images.py
@@ -69,7 +69,7 @@ def response(resp):
dom = html.fromstring(resp.text)
# parse results
- for result in dom.xpath('//div[@class="dg_u"]'):
+ for result in dom.xpath('//div[@class="dg_u"]/div'):
link = result.xpath('./a')[0]
# parse json-data (it is required to add a space, to make it parsable)
diff --git a/searx/engines/doku.py b/searx/engines/doku.py
new file mode 100644
index 000000000..93867fd0d
--- /dev/null
+++ b/searx/engines/doku.py
@@ -0,0 +1,84 @@
+# Doku Wiki
+#
+# @website https://www.dokuwiki.org/
+# @provide-api yes
+# (https://www.dokuwiki.org/devel:xmlrpc)
+#
+# @using-api no
+# @results HTML
+# @stable yes
+# @parse (general) url, title, content
+
+from urllib import urlencode
+from lxml.html import fromstring
+from searx.engines.xpath import extract_text
+
+# engine dependent config
+categories = ['general'] # TODO , 'images', 'music', 'videos', 'files'
+paging = False
+language_support = False
+number_of_results = 5
+
+# search-url
+# Doku is OpenSearch compatible
+base_url = 'http://localhost:8090'
+search_url = '/?do=search'\
+ '&{query}'
+# TODO '&startRecord={offset}'\
+# TODO '&maximumRecords={limit}'\
+
+
+# do search-request
+def request(query, params):
+
+ params['url'] = base_url +\
+ search_url.format(query=urlencode({'id': query}))
+
+ return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+
+ doc = fromstring(resp.text)
+
+ # parse results
+ # Quickhits
+ for r in doc.xpath('//div[@class="search_quickresult"]/ul/li'):
+ try:
+ res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
+ except:
+ continue
+
+ if not res_url:
+ continue
+
+ title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+
+ # append result
+ results.append({'title': title,
+ 'content': "",
+ 'url': base_url + res_url})
+
+ # Search results
+ for r in doc.xpath('//dl[@class="search_results"]/*'):
+ try:
+ if r.tag == "dt":
+ res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
+ title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+ elif r.tag == "dd":
+ content = extract_text(r.xpath('.'))
+
+ # append result
+ results.append({'title': title,
+ 'content': content,
+ 'url': base_url + res_url})
+ except:
+ continue
+
+ if not res_url:
+ continue
+
+ # return results
+ return results
diff --git a/searx/engines/google.py b/searx/engines/google.py
index 8b06e9de6..6018ad1b2 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -90,7 +90,7 @@ url_map = 'https://www.openstreetmap.org/'\
search_path = '/search'
search_url = ('https://{hostname}' +
search_path +
- '?{query}&start={offset}&gbv=1&gws_rd=ssl')
+ '?{query}&start={offset}&gws_rd=cr&gbv=1&lr={lang}&ei=x')
# other URLs
map_hostname_start = 'maps.google.'
@@ -160,6 +160,7 @@ def request(query, params):
if params['language'] == 'all':
language = 'en'
country = 'US'
+ url_lang = ''
else:
language_array = params['language'].lower().split('_')
if len(language_array) == 2:
@@ -167,6 +168,7 @@ def request(query, params):
else:
country = 'US'
language = language_array[0] + ',' + language_array[0] + '-' + country
+ url_lang = 'lang_' + language_array[0]
if use_locale_domain:
google_hostname = country_to_hostname.get(country.upper(), default_hostname)
@@ -175,7 +177,8 @@ def request(query, params):
params['url'] = search_url.format(offset=offset,
query=urlencode({'q': query}),
- hostname=google_hostname)
+ hostname=google_hostname,
+ lang=url_lang)
params['headers']['Accept-Language'] = language
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index 9d51428cc..efe46812a 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -49,8 +49,6 @@ def response(resp):
# parse results
for result in dom.xpath('//div[@data-ved]'):
- data_url = result.xpath('./a/@href')[0]
- data_query = {k: v[0] for k, v in parse_qs(data_url.split('?', 1)[1]).iteritems()}
metadata = loads(result.xpath('./div[@class="rg_meta"]/text()')[0])
@@ -60,11 +58,11 @@ def response(resp):
thumbnail_src = thumbnail_src.replace("http://", "https://")
# append result
- results.append({'url': data_query['imgrefurl'],
+ results.append({'url': metadata['ru'],
'title': metadata['pt'],
'content': metadata['s'],
- 'thumbnail_src': metadata['tu'],
- 'img_src': data_query['imgurl'],
+ 'thumbnail_src': thumbnail_src,
+ 'img_src': metadata['ou'],
'template': 'images.html'})
# return results
diff --git a/searx/languages.py b/searx/languages.py
index b67da9d22..70459a577 100644
--- a/searx/languages.py
+++ b/searx/languages.py
@@ -20,10 +20,10 @@ language_codes = (
("ar_XA", "Arabic", "Arabia"),
("bg_BG", "Bulgarian", "Bulgaria"),
("cs_CZ", "Czech", "Czech Republic"),
- ("de_DE", "German", "Germany"),
("da_DK", "Danish", "Denmark"),
("de_AT", "German", "Austria"),
("de_CH", "German", "Switzerland"),
+ ("de_DE", "German", "Germany"),
("el_GR", "Greek", "Greece"),
("en_AU", "English", "Australia"),
("en_CA", "English", "Canada"),
@@ -58,10 +58,10 @@ language_codes = (
("ko_KR", "Korean", "Korea"),
("lt_LT", "Lithuanian", "Lithuania"),
("lv_LV", "Latvian", "Latvia"),
- ("oc_OC", "Occitan", "Occitan"),
("nb_NO", "Norwegian", "Norway"),
("nl_BE", "Dutch", "Belgium"),
("nl_NL", "Dutch", "Netherlands"),
+ ("oc_OC", "Occitan", "Occitan"),
("pl_PL", "Polish", "Poland"),
("pt_BR", "Portuguese", "Brazil"),
("pt_PT", "Portuguese", "Portugal"),
diff --git a/searx/preferences.py b/searx/preferences.py
new file mode 100644
index 000000000..ad9e67335
--- /dev/null
+++ b/searx/preferences.py
@@ -0,0 +1,271 @@
+from searx import settings, autocomplete
+from searx.languages import language_codes as languages
+
+
+COOKIE_MAX_AGE = 60 * 60 * 24 * 365 * 5 # 5 years
+LANGUAGE_CODES = [l[0] for l in languages]
+LANGUAGE_CODES.append('all')
+DISABLED = 0
+ENABLED = 1
+
+
+class MissingArgumentException(Exception):
+ pass
+
+
+class ValidationException(Exception):
+ pass
+
+
+class Setting(object):
+ """Base class of user settings"""
+
+ def __init__(self, default_value, **kwargs):
+ super(Setting, self).__init__()
+ self.value = default_value
+ for key, value in kwargs.iteritems():
+ setattr(self, key, value)
+
+ self._post_init()
+
+ def _post_init(self):
+ pass
+
+ def parse(self, data):
+ self.value = data
+
+ def get_value(self):
+ return self.value
+
+ def save(self, name, resp):
+ resp.set_cookie(name, bytes(self.value), max_age=COOKIE_MAX_AGE)
+
+
+class StringSetting(Setting):
+ """Setting of plain string values"""
+ pass
+
+
+class EnumStringSetting(Setting):
+ """Setting of a value which can only come from the given choices"""
+
+ def _post_init(self):
+ if not hasattr(self, 'choices'):
+ raise MissingArgumentException('Missing argument: choices')
+
+ if self.value != '' and self.value not in self.choices:
+ raise ValidationException('Invalid default value: {0}'.format(self.value))
+
+ def parse(self, data):
+ if data not in self.choices and data != self.value:
+ raise ValidationException('Invalid choice: {0}'.format(data))
+ self.value = data
+
+
+class MultipleChoiceSetting(EnumStringSetting):
+ """Setting of values which can only come from the given choices"""
+
+ def _post_init(self):
+ if not hasattr(self, 'choices'):
+ raise MissingArgumentException('Missing argument: choices')
+ for item in self.value:
+ if item not in self.choices:
+ raise ValidationException('Invalid default value: {0}'.format(self.value))
+
+ def parse(self, data):
+ if data == '':
+ self.value = []
+ return
+
+ elements = data.split(',')
+ for item in elements:
+ if item not in self.choices:
+ raise ValidationException('Invalid choice: {0}'.format(item))
+ self.value = elements
+
+ def parse_form(self, data):
+ self.value = []
+ for choice in data:
+ if choice in self.choices and choice not in self.value:
+ self.value.append(choice)
+
+ def save(self, name, resp):
+ resp.set_cookie(name, ','.join(self.value), max_age=COOKIE_MAX_AGE)
+
+
+class MapSetting(Setting):
+ """Setting of a value that has to be translated in order to be storable"""
+
+ def _post_init(self):
+ if not hasattr(self, 'map'):
+ raise MissingArgumentException('missing argument: map')
+ if self.value not in self.map.values():
+ raise ValidationException('Invalid default value')
+
+ def parse(self, data):
+ if data not in self.map:
+ raise ValidationException('Invalid choice: {0}'.format(data))
+ self.value = self.map[data]
+ self.key = data
+
+ def save(self, name, resp):
+ resp.set_cookie(name, bytes(self.key), max_age=COOKIE_MAX_AGE)
+
+
+class SwitchableSetting(Setting):
+ """ Base class for settings that can be turned on && off"""
+
+ def _post_init(self):
+ self.disabled = set()
+ self.enabled = set()
+ if not hasattr(self, 'choices'):
+ raise MissingArgumentException('missing argument: choices')
+
+ def transform_form_items(self, items):
+ return items
+
+ def transform_values(self, values):
+ return values
+
+ def parse_cookie(self, data):
+ if data[DISABLED] != '':
+ self.disabled = set(data[DISABLED].split(','))
+ if data[ENABLED] != '':
+ self.enabled = set(data[ENABLED].split(','))
+
+ def parse_form(self, items):
+ items = self.transform_form_items(items)
+
+ self.disabled = set()
+ self.enabled = set()
+ for choice in self.choices:
+ if choice['default_on']:
+ if choice['id'] in items:
+ self.disabled.add(choice['id'])
+ else:
+ if choice['id'] not in items:
+ self.enabled.add(choice['id'])
+
+ def save(self, resp):
+ resp.set_cookie('disabled_{0}'.format(self.value), ','.join(self.disabled), max_age=COOKIE_MAX_AGE)
+ resp.set_cookie('enabled_{0}'.format(self.value), ','.join(self.enabled), max_age=COOKIE_MAX_AGE)
+
+ def get_disabled(self):
+ disabled = self.disabled
+ for choice in self.choices:
+ if not choice['default_on'] and choice['id'] not in self.enabled:
+ disabled.add(choice['id'])
+ return self.transform_values(disabled)
+
+ def get_enabled(self):
+ enabled = self.enabled
+ for choice in self.choices:
+ if choice['default_on'] and choice['id'] not in self.disabled:
+ enabled.add(choice['id'])
+ return self.transform_values(enabled)
+
+
+class EnginesSetting(SwitchableSetting):
+ def _post_init(self):
+ super(EnginesSetting, self)._post_init()
+ transformed_choices = []
+ for engine_name, engine in self.choices.iteritems():
+ for category in engine.categories:
+ transformed_choice = dict()
+ transformed_choice['default_on'] = not engine.disabled
+ transformed_choice['id'] = '{}__{}'.format(engine_name, category)
+ transformed_choices.append(transformed_choice)
+ self.choices = transformed_choices
+
+ def transform_form_items(self, items):
+ return [item[len('engine_'):].replace('_', ' ').replace(' ', '__') for item in items]
+
+ def transform_values(self, values):
+ if len(values) == 1 and values[0] == '':
+ return list()
+ transformed_values = []
+ for value in values:
+ engine, category = value.split('__')
+ transformed_values.append((engine, category))
+ return transformed_values
+
+
+class PluginsSetting(SwitchableSetting):
+ def _post_init(self):
+ super(PluginsSetting, self)._post_init()
+ transformed_choices = []
+ for plugin in self.choices:
+ transformed_choice = dict()
+ transformed_choice['default_on'] = plugin.default_on
+ transformed_choice['id'] = plugin.id
+ transformed_choices.append(transformed_choice)
+ self.choices = transformed_choices
+
+ def transform_form_items(self, items):
+ return [item[len('plugin_'):] for item in items]
+
+
+class Preferences(object):
+ """Stores, validates and saves preferences to cookies"""
+
+ def __init__(self, themes, categories, engines, plugins):
+ super(Preferences, self).__init__()
+
+ self.key_value_settings = {'categories': MultipleChoiceSetting(['general'], choices=categories),
+ 'language': EnumStringSetting('all', choices=LANGUAGE_CODES),
+ 'locale': EnumStringSetting(settings['ui']['default_locale'],
+ choices=settings['locales'].keys()),
+ 'autocomplete': EnumStringSetting(settings['search']['autocomplete'],
+ choices=autocomplete.backends.keys()),
+ 'image_proxy': MapSetting(settings['server']['image_proxy'],
+ map={'': settings['server']['image_proxy'],
+ '0': False,
+ '1': True}),
+ 'method': EnumStringSetting('POST', choices=('GET', 'POST')),
+ 'safesearch': MapSetting(settings['search']['safe_search'], map={'0': 0,
+ '1': 1,
+ '2': 2}),
+ 'theme': EnumStringSetting(settings['ui']['default_theme'], choices=themes)}
+
+ self.engines = EnginesSetting('engines', choices=engines)
+ self.plugins = PluginsSetting('plugins', choices=plugins)
+
+ def parse_cookies(self, input_data):
+ for user_setting_name, user_setting in input_data.iteritems():
+ if user_setting_name in self.key_value_settings:
+ self.key_value_settings[user_setting_name].parse(user_setting)
+ elif user_setting_name == 'disabled_engines':
+ self.engines.parse_cookie((input_data.get('disabled_engines', ''),
+ input_data.get('enabled_engines', '')))
+ elif user_setting_name == 'disabled_plugins':
+ self.plugins.parse_cookie((input_data.get('disabled_plugins', ''),
+ input_data.get('enabled_plugins', '')))
+
+ def parse_form(self, input_data):
+ disabled_engines = []
+ enabled_categories = []
+ disabled_plugins = []
+ for user_setting_name, user_setting in input_data.iteritems():
+ if user_setting_name in self.key_value_settings:
+ self.key_value_settings[user_setting_name].parse(user_setting)
+ elif user_setting_name.startswith('engine_'):
+ disabled_engines.append(user_setting_name)
+ elif user_setting_name.startswith('category_'):
+ enabled_categories.append(user_setting_name[len('category_'):])
+ elif user_setting_name.startswith('plugin_'):
+ disabled_plugins.append(user_setting_name)
+ self.key_value_settings['categories'].parse_form(enabled_categories)
+ self.engines.parse_form(disabled_engines)
+ self.plugins.parse_form(disabled_plugins)
+
+ # cannot be used in case of engines or plugins
+ def get_value(self, user_setting_name):
+ if user_setting_name in self.key_value_settings:
+ return self.key_value_settings[user_setting_name].get_value()
+
+ def save(self, resp):
+ for user_setting_name, user_setting in self.key_value_settings.iteritems():
+ user_setting.save(user_setting_name, resp)
+ self.engines.save(resp)
+ self.plugins.save(resp)
+ return resp
diff --git a/searx/query.py b/searx/query.py
index e79e760a3..3d617ab05 100644
--- a/searx/query.py
+++ b/searx/query.py
@@ -28,12 +28,12 @@ import re
class Query(object):
"""parse query"""
- def __init__(self, query, blocked_engines):
+ def __init__(self, query, disabled_engines):
self.query = query
- self.blocked_engines = []
+ self.disabled_engines = []
- if blocked_engines:
- self.blocked_engines = blocked_engines
+ if disabled_engines:
+ self.disabled_engines = disabled_engines
self.query_parts = []
self.engines = []
@@ -107,7 +107,7 @@ class Query(object):
self.engines.extend({'category': prefix,
'name': engine.name}
for engine in categories[prefix]
- if (engine.name, prefix) not in self.blocked_engines)
+ if (engine.name, prefix) not in self.disabled_engines)
if query_part[0] == '!':
self.specific = True
diff --git a/searx/results.py b/searx/results.py
index 7e087382c..5d51eb5b5 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -116,6 +116,10 @@ class ResultContainer(object):
self.results[engine_name].extend(results)
for i, result in enumerate(results):
+ try:
+ result['url'] = result['url'].decode('utf-8')
+ except:
+ pass
position = i + 1
self._merge_result(result, position)
diff --git a/searx/search.py b/searx/search.py
index ce41b231b..a40801640 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -23,7 +23,7 @@ from searx.engines import (
categories, engines
)
from searx.languages import language_codes
-from searx.utils import gen_useragent, get_blocked_engines
+from searx.utils import gen_useragent
from searx.query import Query
from searx.results import ResultContainer
from searx import logger
@@ -140,15 +140,13 @@ class Search(object):
self.lang = 'all'
# set blocked engines
- self.blocked_engines = get_blocked_engines(engines, request.cookies)
+ self.disabled_engines = request.preferences.engines.get_disabled()
self.result_container = ResultContainer()
self.request_data = {}
# set specific language if set
- if request.cookies.get('language')\
- and request.cookies['language'] in (x[0] for x in language_codes):
- self.lang = request.cookies['language']
+ self.lang = request.preferences.get_value('language')
# set request method
if request.method == 'POST':
@@ -169,7 +167,7 @@ class Search(object):
# parse query, if tags are set, which change
# the serch engine or search-language
- query_obj = Query(self.request_data['q'], self.blocked_engines)
+ query_obj = Query(self.request_data['q'], self.disabled_engines)
query_obj.parse_query()
# set query
@@ -229,8 +227,7 @@ class Search(object):
# using user-defined default-configuration which
# (is stored in cookie)
if not self.categories:
- cookie_categories = request.cookies.get('categories', '')
- cookie_categories = cookie_categories.split(',')
+ cookie_categories = request.preferences.get_value('categories')
for ccateg in cookie_categories:
if ccateg in categories:
self.categories.append(ccateg)
@@ -246,7 +243,7 @@ class Search(object):
self.engines.extend({'category': categ,
'name': engine.name}
for engine in categories[categ]
- if (engine.name, categ) not in self.blocked_engines)
+ if (engine.name, categ) not in self.disabled_engines)
# remove suspended engines
self.engines = [e for e in self.engines
@@ -294,11 +291,8 @@ class Search(object):
else:
request_params['language'] = self.lang
- try:
- # 0 = None, 1 = Moderate, 2 = Strict
- request_params['safesearch'] = int(request.cookies.get('safesearch'))
- except Exception:
- request_params['safesearch'] = settings['search']['safe_search']
+ # 0 = None, 1 = Moderate, 2 = Strict
+ request_params['safesearch'] = request.preferences.get_value('safesearch')
# update request parameters dependent on
# search-engine (contained in engines folder)
diff --git a/searx/settings.yml b/searx/settings.yml
index 580ce1ac6..e18d27ff3 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -38,6 +38,10 @@ engines:
engine : archlinux
shortcut : al
+ - name : base
+ engine : base
+ shortcut : bs
+
- name : wikipedia
engine : mediawiki
shortcut : wp
@@ -421,6 +425,13 @@ engines:
# number_of_results : 5
# timeout : 3.0
+# Doku engine lets you access to any Doku wiki instance:
+# A public one or a private/corporate one.
+# - name : ubuntuwiki
+# engine : doku
+# shortcut : uw
+# base_url : 'http://doc.ubuntu-fr.org'
+
locales:
en : English
bg : Български (Bulgarian)
diff --git a/searx/settings_robot.yml b/searx/settings_robot.yml
index fb193e43d..7c7c4eec2 100644
--- a/searx/settings_robot.yml
+++ b/searx/settings_robot.yml
@@ -4,7 +4,7 @@ general:
search:
safe_search : 0
- autocomplete : 0
+ autocomplete : ""
server:
port : 11111
diff --git a/searx/templates/courgette/preferences.html b/searx/templates/courgette/preferences.html
index f89915d8d..ba4d0c650 100644
--- a/searx/templates/courgette/preferences.html
+++ b/searx/templates/courgette/preferences.html
@@ -109,7 +109,7 @@
<td>{{ search_engine.name }} ({{ shortcuts[search_engine.name] }})&lrm;</td>
<td>{{ _(categ) }}</td>
<td class="engine_checkbox">
- <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in blocked_engines %} checked="checked"{% endif %} />
+ <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in disabled_engines %} checked="checked"{% endif %} />
<label class="allow" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Allow') }}</label>
<label class="deny" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Block') }}</label>
</td>
diff --git a/searx/templates/default/preferences.html b/searx/templates/default/preferences.html
index 90006c029..a47dba458 100644
--- a/searx/templates/default/preferences.html
+++ b/searx/templates/default/preferences.html
@@ -97,7 +97,7 @@
<td>{{ search_engine.name }} ({{ shortcuts[search_engine.name] }})&lrm;</td>
<td>{{ _(categ) }}</td>
<td class="engine_checkbox">
- <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in blocked_engines %} checked="checked"{% endif %} />
+ <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in disabled_engines %} checked="checked"{% endif %} />
<label class="allow" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Allow') }}</label>
<label class="deny" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Block') }}</label>
</td>
diff --git a/searx/templates/oscar/base.html b/searx/templates/oscar/base.html
index 4813fffc2..f63025ecc 100644
--- a/searx/templates/oscar/base.html
+++ b/searx/templates/oscar/base.html
@@ -86,5 +86,8 @@
{% for script in scripts %}
<script src="{{ url_for('static', filename=script) }}"></script>
{% endfor %}
+ <script type="text/javascript">
+ $(function() { $('a[data-toggle="modal"]').attr('href', '#'); });
+ </script>
</body>
</html>
diff --git a/searx/templates/oscar/preferences.html b/searx/templates/oscar/preferences.html
index c677a0c66..a2c493a02 100644
--- a/searx/templates/oscar/preferences.html
+++ b/searx/templates/oscar/preferences.html
@@ -157,7 +157,7 @@
{% if not search_engine.private %}
<tr>
{% if not rtl %}
- <td>{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in blocked_engines) }}</td>
+ <td>{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}</td>
<th>{{ search_engine.name }}</th>
<td>{{ shortcuts[search_engine.name] }}</td>
<td><input type="checkbox" {{ "checked" if search_engine.safesearch==True else ""}} readonly="readonly" disabled="disabled"></td>
@@ -169,7 +169,7 @@
<td><input type="checkbox" {{ "checked" if search_engine.safesearch==True else ""}} readonly="readonly" disabled="disabled"></td>
<td>{{ shortcuts[search_engine.name] }}</td>
<th>{{ search_engine.name }}</th>
- <td>{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in blocked_engines) }}</td>
+ <td>{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}</td>
{% endif %}
</tr>
{% endif %}
diff --git a/searx/templates/pix-art/preferences.html b/searx/templates/pix-art/preferences.html
index f59497ec8..a4a6cd268 100644
--- a/searx/templates/pix-art/preferences.html
+++ b/searx/templates/pix-art/preferences.html
@@ -60,7 +60,7 @@
<tr>
<td>{{ search_engine.name }} ({{ shortcuts[search_engine.name] }})&lrm;</td>
<td class="engine_checkbox">
- <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in blocked_engines %} checked="checked"{% endif %} />
+ <input type="checkbox" id="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}" name="engine_{{ search_engine.name }}__{{ categ }}"{% if (search_engine.name, categ) in disabled_engines %} checked="checked"{% endif %} />
<label class="allow" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Allow') }}</label>
<label class="deny" for="engine_{{ categ|replace(' ', '_') }}_{{ search_engine.name|replace(' ', '_') }}">{{ _('Block') }}</label>
</td>
diff --git a/searx/utils.py b/searx/utils.py
index 506228465..b297582ef 100644
--- a/searx/utils.py
+++ b/searx/utils.py
@@ -230,26 +230,3 @@ def list_get(a_list, index, default=None):
return a_list[index]
else:
return default
-
-
-def get_blocked_engines(engines, cookies):
- if 'blocked_engines' not in cookies:
- return [(engine_name, category) for engine_name in engines
- for category in engines[engine_name].categories if engines[engine_name].disabled]
-
- blocked_engine_strings = cookies.get('blocked_engines', '').split(',')
- blocked_engines = []
-
- if not blocked_engine_strings:
- return blocked_engines
-
- for engine_string in blocked_engine_strings:
- if engine_string.find('__') > -1:
- engine, category = engine_string.split('__', 1)
- if engine in engines and category in engines[engine].categories:
- blocked_engines.append((engine, category))
- elif engine_string in engines:
- for category in engines[engine_string].categories:
- blocked_engines.append((engine_string, category))
-
- return blocked_engines
diff --git a/searx/webapp.py b/searx/webapp.py
index 66ba65a29..ed8ff1bd9 100644
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -56,7 +56,7 @@ from searx.engines import (
from searx.utils import (
UnicodeWriter, highlight_content, html_to_text, get_themes,
get_static_files, get_result_templates, gen_useragent, dict_subset,
- prettify_url, get_blocked_engines
+ prettify_url
)
from searx.version import VERSION_STRING
from searx.languages import language_codes
@@ -64,6 +64,7 @@ from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
from searx.plugins import plugins
+from searx.preferences import Preferences
# check if the pyopenssl, ndg-httpsclient, pyasn1 packages are installed.
# They are needed for SSL connection without trouble, see #298
@@ -73,7 +74,7 @@ try:
import pyasn1 # NOQA
except ImportError:
logger.critical("The pyopenssl, ndg-httpsclient, pyasn1 packages have to be installed.\n"
- "Some HTTPS connections will failed")
+ "Some HTTPS connections will fail")
static_path, templates_path, themes =\
@@ -109,8 +110,7 @@ for indice, theme in enumerate(themes):
for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
global_favicons[indice].extend(filenames)
-cookie_max_age = 60 * 60 * 24 * 365 * 5 # 5 years
-
+# used when translating category names
_category_names = (gettext('files'),
gettext('general'),
gettext('music'),
@@ -129,11 +129,8 @@ outgoing_proxies = settings['outgoing'].get('proxies', None)
def get_locale():
locale = request.accept_languages.best_match(settings['locales'].keys())
- if settings['ui'].get('default_locale'):
- locale = settings['ui']['default_locale']
-
- if request.cookies.get('locale', '') in settings['locales']:
- locale = request.cookies.get('locale', '')
+ if request.preferences.get_value('locale') != '':
+ locale = request.preferences.get_value('locale')
if 'locale' in request.args\
and request.args['locale'] in settings['locales']:
@@ -222,9 +219,7 @@ def get_current_theme_name(override=None):
if override and override in themes:
return override
- theme_name = request.args.get('theme',
- request.cookies.get('theme',
- default_theme))
+ theme_name = request.args.get('theme', request.preferences.get_value('theme'))
if theme_name not in themes:
theme_name = default_theme
return theme_name
@@ -251,7 +246,7 @@ def image_proxify(url):
if url.startswith('//'):
url = 'https:' + url
- if not settings['server'].get('image_proxy') and not request.cookies.get('image_proxy'):
+ if not request.preferences.get_value('image_proxy'):
return url
hash_string = url + settings['server']['secret_key']
@@ -262,23 +257,18 @@ def image_proxify(url):
def render(template_name, override_theme=None, **kwargs):
- blocked_engines = get_blocked_engines(engines, request.cookies)
-
- autocomplete = request.cookies.get('autocomplete', settings['search']['autocomplete'])
-
- if autocomplete not in autocomplete_backends:
- autocomplete = None
+ disabled_engines = request.preferences.engines.get_disabled()
- nonblocked_categories = set(category for engine_name in engines
- for category in engines[engine_name].categories
- if (engine_name, category) not in blocked_engines)
+ enabled_categories = set(category for engine_name in engines
+ for category in engines[engine_name].categories
+ if (engine_name, category) not in disabled_engines)
if 'categories' not in kwargs:
kwargs['categories'] = ['general']
kwargs['categories'].extend(x for x in
sorted(categories.keys())
if x != 'general'
- and x in nonblocked_categories)
+ and x in enabled_categories)
if 'all_categories' not in kwargs:
kwargs['all_categories'] = ['general']
@@ -295,25 +285,24 @@ def render(template_name, override_theme=None, **kwargs):
kwargs['selected_categories'].append(c)
if not kwargs['selected_categories']:
- cookie_categories = request.cookies.get('categories', '').split(',')
+ cookie_categories = request.preferences.get_value('categories')
for ccateg in cookie_categories:
- if ccateg in categories:
- kwargs['selected_categories'].append(ccateg)
+ kwargs['selected_categories'].append(ccateg)
if not kwargs['selected_categories']:
kwargs['selected_categories'] = ['general']
if 'autocomplete' not in kwargs:
- kwargs['autocomplete'] = autocomplete
+ kwargs['autocomplete'] = request.preferences.get_value('autocomplete')
if get_locale() in rtl_locales and 'rtl' not in kwargs:
kwargs['rtl'] = True
kwargs['searx_version'] = VERSION_STRING
- kwargs['method'] = request.cookies.get('method', 'POST')
+ kwargs['method'] = request.preferences.get_value('method')
- kwargs['safesearch'] = request.cookies.get('safesearch', str(settings['search']['safe_search']))
+ kwargs['safesearch'] = str(request.preferences.get_value('safesearch'))
# override url_for function in templates
kwargs['url_for'] = url_for_theme
@@ -347,14 +336,18 @@ def render(template_name, override_theme=None, **kwargs):
@app.before_request
def pre_request():
# merge GET, POST vars
+ preferences = Preferences(themes, categories.keys(), engines, plugins)
+ preferences.parse_cookies(request.cookies)
+ request.preferences = preferences
+
request.form = dict(request.form.items())
for k, v in request.args.items():
if k not in request.form:
request.form[k] = v
request.user_plugins = []
- allowed_plugins = request.cookies.get('allowed_plugins', '').split(',')
- disabled_plugins = request.cookies.get('disabled_plugins', '').split(',')
+ allowed_plugins = preferences.plugins.get_enabled()
+ disabled_plugins = preferences.plugins.get_disabled()
for plugin in plugins:
if ((plugin.default_on and plugin.id not in disabled_plugins)
or plugin.id in allowed_plugins):
@@ -408,17 +401,21 @@ def index():
# TODO, check if timezone is calculated right
if 'publishedDate' in result:
- result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
- if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
- timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
- minutes = int((timedifference.seconds / 60) % 60)
- hours = int(timedifference.seconds / 60 / 60)
- if hours == 0:
- result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)
- else:
- result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa
+ try: # test if publishedDate >= 1900 (datetime module bug)
+ result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
+ except ValueError:
+ result['publishedDate'] = None
else:
- result['publishedDate'] = format_date(result['publishedDate'])
+ if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
+ timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
+ minutes = int((timedifference.seconds / 60) % 60)
+ hours = int(timedifference.seconds / 60 / 60)
+ if hours == 0:
+ result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)
+ else:
+ result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa
+ else:
+ result['publishedDate'] = format_date(result['publishedDate'])
if search.request_data.get('format') == 'json':
return Response(json.dumps({'query': search.query,
@@ -482,10 +479,10 @@ def autocompleter():
request_data = request.args
# set blocked engines
- blocked_engines = get_blocked_engines(engines, request.cookies)
+ disabled_engines = request.preferences.engines.get_disabled()
# parse query
- query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
+ query = Query(request_data.get('q', '').encode('utf-8'), disabled_engines)
query.parse_query()
# check if search query is set
@@ -493,15 +490,21 @@ def autocompleter():
return '', 400
# run autocompleter
- completer = autocomplete_backends.get(request.cookies.get('autocomplete', settings['search']['autocomplete']))
+ completer = autocomplete_backends.get(request.preferences.get_value('autocomplete'))
# parse searx specific autocompleter results like !bang
raw_results = searx_bang(query)
# normal autocompletion results only appear if max 3 inner results returned
if len(raw_results) <= 3 and completer:
+ # get language from cookie
+ language = request.preferences.get_value('language')
+ if not language or language == 'all':
+ language = 'en'
+ else:
+ language = language.split('_')[0]
# run autocompletion
- raw_results.extend(completer(query.getSearchQuery()))
+ raw_results.extend(completer(query.getSearchQuery(), language))
# parse results (write :language and !engine back to result string)
results = []
@@ -522,117 +525,23 @@ def autocompleter():
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
- """Render preferences page.
-
- Settings that are going to be saved as cookies."""
- lang = None
- image_proxy = request.cookies.get('image_proxy', settings['server'].get('image_proxy'))
-
- if request.cookies.get('language')\
- and request.cookies['language'] in (x[0] for x in language_codes):
- lang = request.cookies['language']
-
- blocked_engines = []
-
- resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
-
- if request.method == 'GET':
- blocked_engines = get_blocked_engines(engines, request.cookies)
- else: # on save
- selected_categories = []
- post_disabled_plugins = []
- locale = None
- autocomplete = ''
- method = 'POST'
- safesearch = settings['search']['safe_search']
- for pd_name, pd in request.form.items():
- if pd_name.startswith('category_'):
- category = pd_name[9:]
- if category not in categories:
- continue
- selected_categories.append(category)
- elif pd_name == 'locale' and pd in settings['locales']:
- locale = pd
- elif pd_name == 'image_proxy':
- image_proxy = pd
- elif pd_name == 'autocomplete':
- autocomplete = pd
- elif pd_name == 'language' and (pd == 'all' or
- pd in (x[0] for
- x in language_codes)):
- lang = pd
- elif pd_name == 'method':
- method = pd
- elif pd_name == 'safesearch':
- safesearch = pd
- elif pd_name.startswith('engine_'):
- if pd_name.find('__') > -1:
- # TODO fix underscore vs space
- engine_name, category = [x.replace('_', ' ') for x in
- pd_name.replace('engine_', '', 1).split('__', 1)]
- if engine_name in engines and category in engines[engine_name].categories:
- blocked_engines.append((engine_name, category))
- elif pd_name == 'theme':
- theme = pd if pd in themes else default_theme
- elif pd_name.startswith('plugin_'):
- plugin_id = pd_name.replace('plugin_', '', 1)
- if not any(plugin.id == plugin_id for plugin in plugins):
- continue
- post_disabled_plugins.append(plugin_id)
- else:
- resp.set_cookie(pd_name, pd, max_age=cookie_max_age)
+ """Render preferences page && save user preferences"""
- disabled_plugins = []
- allowed_plugins = []
- for plugin in plugins:
- if plugin.default_on:
- if plugin.id in post_disabled_plugins:
- disabled_plugins.append(plugin.id)
- elif plugin.id not in post_disabled_plugins:
- allowed_plugins.append(plugin.id)
-
- resp.set_cookie('disabled_plugins', ','.join(disabled_plugins), max_age=cookie_max_age)
-
- resp.set_cookie('allowed_plugins', ','.join(allowed_plugins), max_age=cookie_max_age)
-
- resp.set_cookie(
- 'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
- max_age=cookie_max_age
- )
-
- if locale:
- resp.set_cookie(
- 'locale', locale,
- max_age=cookie_max_age
- )
-
- if lang:
- resp.set_cookie(
- 'language', lang,
- max_age=cookie_max_age
- )
-
- if selected_categories:
- # cookie max age: 4 weeks
- resp.set_cookie(
- 'categories', ','.join(selected_categories),
- max_age=cookie_max_age
- )
-
- resp.set_cookie(
- 'autocomplete', autocomplete,
- max_age=cookie_max_age
- )
-
- resp.set_cookie('method', method, max_age=cookie_max_age)
-
- resp.set_cookie('safesearch', str(safesearch), max_age=cookie_max_age)
-
- resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)
-
- resp.set_cookie('theme', theme, max_age=cookie_max_age)
-
- return resp
+ # save preferences
+ if request.method == 'POST':
+ resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
+ try:
+ request.preferences.parse_form(request.form)
+ except ValidationException:
+ # TODO use flash feature of flask
+ return resp
+ return request.preferences.save(resp)
+
+ # render preferences
+ image_proxy = request.preferences.get_value('image_proxy')
+ lang = request.preferences.get_value('language')
+ disabled_engines = request.preferences.engines.get_disabled()
+ allowed_plugins = request.preferences.plugins.get_enabled()
# stats for preferences page
stats = {}
@@ -654,17 +563,17 @@ def preferences():
return render('preferences.html',
locales=settings['locales'],
current_locale=get_locale(),
- current_language=lang or 'all',
+ current_language=lang,
image_proxy=image_proxy,
language_codes=language_codes,
engines_by_category=categories,
stats=stats,
- blocked_engines=blocked_engines,
+ disabled_engines=disabled_engines,
autocomplete_backends=autocomplete_backends,
shortcuts={y: x for x, y in engine_shortcuts.items()},
themes=themes,
plugins=plugins,
- allowed_plugins=[plugin.id for plugin in request.user_plugins],
+ allowed_plugins=allowed_plugins,
theme=get_current_theme_name())
@@ -740,7 +649,7 @@ Disallow: /preferences
def opensearch():
method = 'post'
- if request.cookies.get('method', 'POST') == 'GET':
+ if request.preferences.get_value('method') == 'GET':
method = 'get'
# chrome/chromium only supports HTTP GET....
diff --git a/tests/robot/test_basic.robot b/tests/robot/test_basic.robot
index 1b8e78fff..4a20d0ba2 100644
--- a/tests/robot/test_basic.robot
+++ b/tests/robot/test_basic.robot
@@ -42,3 +42,111 @@ Change language
Location Should Be http://localhost:11111/
Page Should Contain rólunk
Page Should Contain beállítások
+
+Change method
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ Select From List method GET
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be method GET
+ Select From List method POST
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be method POST
+
+Change theme
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be theme default
+ Select From List theme oscar
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be theme oscar
+
+Change safesearch
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be safesearch None
+ Select From List safesearch Strict
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be safesearch Strict
+
+Change image proxy
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be image_proxy Disabled
+ Select From List image_proxy Enabled
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be image_proxy Enabled
+
+Change search language
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be language Automatic
+ Select From List language Turkish (Turkey) - tr_TR
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be language Turkish (Turkey) - tr_TR
+
+Change autocomplete
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be autocomplete -
+ Select From List autocomplete google
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be autocomplete google
+
+Change allowed/disabled engines
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ Page Should Contain Engine name
+ Element Should Contain xpath=//label[@class="deny"][@for='engine_dummy_dummy_dummy'] Block
+ Element Should Contain xpath=//label[@class="deny"][@for='engine_general_general_dummy'] Block
+ Click Element xpath=//label[@class="deny"][@for='engine_general_general_dummy']
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ Page Should Contain Engine name
+ Element Should Contain xpath=//label[@class="deny"][@for='engine_dummy_dummy_dummy'] Block
+ Element Should Contain xpath=//label[@class="deny"][@for='engine_general_general_dummy'] \
+
+Block a plugin
+ Page Should Contain about
+ Page Should Contain preferences
+ Go To http://localhost:11111/preferences
+ List Selection Should Be theme default
+ Select From List theme oscar
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ List Selection Should Be theme oscar
+ Page Should Contain Plugins
+ Click Link Plugins
+ Checkbox Should Not Be Selected id=plugin_HTTPS_rewrite
+ Click Element xpath=//label[@for='plugin_HTTPS_rewrite']
+ Submit Form id=search_form
+ Location Should Be http://localhost:11111/
+ Go To http://localhost:11111/preferences
+ Page Should Contain Plugins
+ Click Link Plugins
+ Checkbox Should Be Selected id=plugin_HTTPS_rewrite
diff --git a/tests/unit/engines/test_archlinux.py b/tests/unit/engines/test_archlinux.py
index 66959857a..d0009d63a 100644
--- a/tests/unit/engines/test_archlinux.py
+++ b/tests/unit/engines/test_archlinux.py
@@ -18,7 +18,7 @@ class TestArchLinuxEngine(SearxTestCase):
def test_request(self):
query = 'test_query'
dic = defaultdict(dict)
- dic['pageno'] = 0
+ dic['pageno'] = 1
dic['language'] = 'en_US'
params = archlinux.request(query, dic)
self.assertTrue('url' in params)
@@ -31,10 +31,8 @@ class TestArchLinuxEngine(SearxTestCase):
self.assertTrue(domain in params['url'])
def test_response(self):
- response = mock.Mock(text='<html></html>')
- response.search_params = {
- 'language': 'en_US'
- }
+ response = mock.Mock(text='<html></html>',
+ search_params={'language': 'en_US'})
self.assertEqual(archlinux.response(response), [])
html = """
@@ -79,18 +77,15 @@ class TestArchLinuxEngine(SearxTestCase):
expected = [
{
'title': 'ATI',
- 'url': 'https://wiki.archlinux.org/index.php/ATI',
- 'content': 'Lorem ipsum dolor sit amet'
+ 'url': 'https://wiki.archlinux.org/index.php/ATI'
},
{
'title': 'Frequently asked questions',
- 'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions',
- 'content': 'CPUs with AMDs instruction set "AMD64"'
+ 'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions'
},
{
'title': 'CPU frequency scaling',
- 'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling',
- 'content': 'ondemand for AMD and older Intel CPU'
+ 'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling'
}
]
diff --git a/tests/unit/engines/test_bing_images.py b/tests/unit/engines/test_bing_images.py
index 88538d8fa..9d8ec18af 100644
--- a/tests/unit/engines/test_bing_images.py
+++ b/tests/unit/engines/test_bing_images.py
@@ -38,12 +38,12 @@ class TestBingImagesEngine(SearxTestCase):
self.assertEqual(bing_images.response(response), [])
html = """
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
+ <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px"><div>
<a href="/images/search?q=south&amp;view=detailv2&amp;&amp;id=7E92863981CCFB89FBDD55205C742DFDA3290CF6&amp;selectedIndex=9&amp;ccid=vzvIfv5u&amp;simid=608055786735667000&amp;thid=OIP.Mbf3bc87efe6e0e476be8cc34bf6cd80eH0" ihk="OIP.Mbf3bc87efe6e0e476be8cc34bf6cd80eH0" t1="South Carolina" t2="747 x 589 &#183; 29 kB &#183; gif" t3="www.digital-topo-maps.com/county-map/south-carolina.shtml" hh="236" hw="300" m='{ns:"images",k:"5117",mid:"7E92863981CCFB89FBDD55205C742DFDA3290CF6",md5:"bf3bc87efe6e0e476be8cc34bf6cd80e",surl:"http://www.digital-topo-maps.com/county-map/south-carolina.shtml",imgurl:"http://www.digital-topo-maps.com/county-map/south-carolina-county-map.gif",tid:"OIP.Mbf3bc87efe6e0e476be8cc34bf6cd80eH0",ow:"480",docid:"608055786735667000",oh:"378",tft:"45"}' mid="7E92863981CCFB89FBDD55205C742DFDA3290CF6" h="ID=images,5117.1">
<img class="img_hid" src2="https://tse4.mm.bing.net/th?id=OIP.Mbf3bc87efe6e0e476be8cc34bf6cd80eH0&amp;w=210&amp;h=154&amp;c=7&amp;rs=1&amp;qlt=90&amp;o=4&amp;pid=1.1" style="width:210px;height:154px;" width="210" height="154">
</a>
- </div>
+ </div></div>
""" # noqa
html = html.replace('\r\n', '').replace('\n', '').replace('\r', '')
response = mock.Mock(text=html)
@@ -78,7 +78,7 @@ class TestBingImagesEngine(SearxTestCase):
self.assertEqual(len(results), 0)
html = """
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
+ <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px"><div>
<a href="#" ihk="HN.608003696942779811"
m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
@@ -89,8 +89,8 @@ oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%2
<img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
style="height:144px;" width="178" height="144"/>
</a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
+ </div></div>
+ <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px"><div>
<a href="#" ihk="HN.608003696942779811"
m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
@@ -101,8 +101,8 @@ oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%2
<img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
style="height:144px;" width="178" height="144"/>
</a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
+ </div></div>
+ <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px"><div>
<a href="#" ihk="HN.608003696942779811"
m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
@@ -113,154 +113,10 @@ oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%2
<img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
style="height:144px;" width="178" height="144"/>
</a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
- <div class="dg_u" style="width:178px;height:144px;left:17px;top:0px">
- <a href="#" ihk="HN.608003696942779811"
- m="{ns:&quot;images&quot;,k:&quot;5045&quot;,
-mid:&quot;659EB92C317974F34517A1CCAEBEF76A578E08DEE&quot;,
-surl:&quot;http://www.page.url/&quot;,imgurl:&quot;http://test.url/Test%20Query.jpg&quot;,
-oh:&quot;238&quot;,tft:&quot;0&quot;,oi:&quot;http://www.image.url/Images/Test%20Query.jpg&quot;}"
- mid="59EB92C317974F34517A1CCAEBEF76A578E08DEE" onclick="return false;"
- t1="Test Query" t2="650 x 517 · 31 kB · jpeg" t3="www.short.url" h="ID=images,5045.1">
- <img src="https://tse4.mm.bing.net/th?id=HN.608003696942779811&amp;o=4&amp;pid=1.7"
- style="height:144px;" width="178" height="144"/>
- </a>
- </div>
+ </div></div>
"""
html = html.replace('\r\n', '').replace('\n', '').replace('\r', '')
response = mock.Mock(text=html)
results = bing_images.response(response)
self.assertEqual(type(results), list)
- self.assertEqual(len(results), 10)
+ self.assertEqual(len(results), 3)
diff --git a/tests/unit/engines/test_doku.py b/tests/unit/engines/test_doku.py
new file mode 100644
index 000000000..22ddb7a7f
--- /dev/null
+++ b/tests/unit/engines/test_doku.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+from collections import defaultdict
+import mock
+from searx.engines import doku
+from searx.testing import SearxTestCase
+
+
+class TestDokuEngine(SearxTestCase):
+
+ def test_request(self):
+ query = 'test_query'
+ dicto = defaultdict(dict)
+ params = doku.request(query, dicto)
+ self.assertIn('url', params)
+ self.assertIn(query, params['url'])
+
+ def test_response(self):
+ self.assertRaises(AttributeError, doku.response, None)
+ self.assertRaises(AttributeError, doku.response, [])
+ self.assertRaises(AttributeError, doku.response, '')
+ self.assertRaises(AttributeError, doku.response, '[]')
+
+ response = mock.Mock(text='<html></html>')
+ self.assertEqual(doku.response(response), [])
+
+ html = u"""
+ <div class="search_quickresult">
+ <h3>Pages trouvées :</h3>
+ <ul class="search_quickhits">
+ <li> <a href="/xfconf-query" class="wikilink1" title="xfconf-query">xfconf-query</a></li>
+ </ul>
+ <div class="clearer"></div>
+ </div>
+ """
+ response = mock.Mock(text=html)
+ results = doku.response(response)
+ expected = [{'content': '', 'title': 'xfconf-query', 'url': 'http://localhost:8090/xfconf-query'}]
+ self.assertEqual(doku.response(response), expected)
+
+ html = u"""
+ <dl class="search_results">
+ <dt><a href="/xvnc?s[]=query" class="wikilink1" title="xvnc">xvnc</a>: 40 Occurrences trouvées</dt>
+ <dd>er = /usr/bin/Xvnc
+ server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 640x480 ... er = /usr/bin/Xvnc
+ server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 800x600 ... er = /usr/bin/Xvnc
+ server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1024x768 ... er = /usr/bin/Xvnc
+ server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1280x1024 -depth 8 -Sec</dd>
+ <dt><a href="/postfix_mysql_tls_sasl_1404?s[]=query"
+ class="wikilink1"
+ title="postfix_mysql_tls_sasl_1404">postfix_mysql_tls_sasl_1404</a>: 14 Occurrences trouvées</dt>
+ <dd>tdepasse
+ hosts = 127.0.0.1
+ dbname = postfix
+ <strong class="search_hit">query</strong> = SELECT goto FROM alias WHERE address='%s' AND a... tdepasse
+ hosts = 127.0.0.1
+ dbname = postfix
+ <strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s'
+ #optional <strong class="search_hit">query</strong> to use when relaying for backup MX
+ #<strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s' and backupmx =</dd>
+ <dt><a href="/bind9?s[]=query" class="wikilink1" title="bind9">bind9</a>: 12 Occurrences trouvées</dt>
+ <dd> printcmd
+;; Got answer:
+;; -&gt;&gt;HEADER&lt;&lt;- opcode: <strong class="search_hit">QUERY</strong>, status: NOERROR, id: 13427
+;; flags: qr aa rd ra; <strong class="search_hit">QUERY</strong>: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 1
+
+[...]
+
+;; <strong class="search_hit">Query</strong> time: 1 msec
+;; SERVER: 127.0.0.1#53(127.0.0.1)
+;... par la requête (<strong class="search_hit">Query</strong> time) , entre la première et la deuxième requête.</dd>
+ </dl>
+ """
+ response = mock.Mock(text=html)
+ results = doku.response(response)
+ self.assertEqual(type(results), list)
+ self.assertEqual(len(results), 3)
+ self.assertEqual(results[0]['title'], 'xvnc')
+# FIXME self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű')
+# FIXME self.assertEqual(results[0]['content'], 'This should be the content.')
diff --git a/tests/unit/engines/test_google_images.py b/tests/unit/engines/test_google_images.py
index 876d0af1e..5f184e00c 100644
--- a/tests/unit/engines/test_google_images.py
+++ b/tests/unit/engines/test_google_images.py
@@ -41,7 +41,7 @@ class TestGoogleImagesEngine(SearxTestCase):
</div>
</a>
<div class="rg_meta">
- {"id":"bQWQ9wz9loJmjM:","isu":"clker.com","ity":"png","md":"/search?tbs\u003dsbi:AMhZZit7u1mHyop9pQisu-5idR-8W_1Itvwc3afChmsjQYPx_1yYMzBvUZgtkcGoojqekKZ-6n_1rjX9ySH0OWA_1eO5OijFY6BBDw_1GApr6xxb1bXJcBcj-DiguMoXWW7cZSG7MRQbwnI5SoDZNXcv_1xGszy886I7NVb_1oRKSliTHtzqbXAxhvYreM","msu":"/search?q\u003dsouth\u0026biw\u003d1364\u0026bih\u003d235\u0026tbm\u003disch\u0026tbs\u003dsimg:CAQSEgltBZD3DP2WgiG-U42R4G0RFw","oh":598,"os":"13KB","ow":504,"pt":"South Arrow Clip Art at Clker.com - vector clip art online ...","rid":"vlONkeBtERfDuM","s":"Download this image as:","sc":1,"si":"/search?q\u003dsouth\u0026biw\u003d1364\u0026bih\u003d235\u0026tbm\u003disch\u0026tbs\u003dsimg:CAESEgltBZD3DP2WgiG-U42R4G0RFw","th":245,"tu":"https://thumbnail.url/","tw":206}
+ {"id":"bQWQ9wz9loJmjM:","isu":"clker.com","ity":"png","md":"/search?tbs\u003dsbi:AMhZZit7u1mHyop9pQisu-5idR-8W_1Itvwc3afChmsjQYPx_1yYMzBvUZgtkcGoojqekKZ-6n_1rjX9ySH0OWA_1eO5OijFY6BBDw_1GApr6xxb1bXJcBcj-DiguMoXWW7cZSG7MRQbwnI5SoDZNXcv_1xGszy886I7NVb_1oRKSliTHtzqbXAxhvYreM","msu":"/search?q\u003dsouth\u0026biw\u003d1364\u0026bih\u003d235\u0026tbm\u003disch\u0026tbs\u003dsimg:CAQSEgltBZD3DP2WgiG-U42R4G0RFw","oh":598,"os":"13KB","ow":504,"pt":"South Arrow Clip Art at Clker.com - vector clip art online ...","rid":"vlONkeBtERfDuM","s":"Download this image as:","sc":1,"si":"/search?q\u003dsouth\u0026biw\u003d1364\u0026bih\u003d235\u0026tbm\u003disch\u0026tbs\u003dsimg:CAESEgltBZD3DP2WgiG-U42R4G0RFw","th":245,"tu":"https://thumbnail.url/","tw":206,"ru":"a","ou":"b"}
</div>
</div><!--n--><!--m-->
</div>
@@ -52,7 +52,7 @@ class TestGoogleImagesEngine(SearxTestCase):
self.assertEqual(type(results), list)
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['title'], u'South Arrow Clip Art at Clker.com - vector clip art online ...')
- self.assertEqual(results[0]['url'], 'http://www.clker.com/clipart-south-arrow.html')
+ self.assertEqual(results[0]['url'], 'a')
self.assertEqual(results[0]['thumbnail_src'], 'https://thumbnail.url/')
- self.assertEqual(results[0]['img_src'], 'http://www.clker.com/cliparts/H/X/l/b/0/0/south-arrow-hi.png')
+ self.assertEqual(results[0]['img_src'], 'b')
self.assertEqual(results[0]['content'], 'Download this image as:')
diff --git a/tests/unit/test_preferences.py b/tests/unit/test_preferences.py
new file mode 100644
index 000000000..e418c0af4
--- /dev/null
+++ b/tests/unit/test_preferences.py
@@ -0,0 +1,101 @@
+from searx.preferences import (EnumStringSetting, MapSetting, MissingArgumentException,
+ MultipleChoiceSetting, PluginsSetting, ValidationException)
+from searx.testing import SearxTestCase
+
+
+class PluginStub(object):
+ def __init__(self, id, default_on):
+ self.id = id
+ self.default_on = default_on
+
+
+class TestSettings(SearxTestCase):
+ # map settings
+ def test_map_setting_invalid_initialization(self):
+ with self.assertRaises(MissingArgumentException):
+ setting = MapSetting(3, wrong_argument={'0': 0})
+
+ def test_map_setting_invalid_default_value(self):
+ with self.assertRaises(ValidationException):
+ setting = MapSetting(3, map={'dog': 1, 'bat': 2})
+
+ def test_map_setting_invalid_choice(self):
+ setting = MapSetting(2, map={'dog': 1, 'bat': 2})
+ with self.assertRaises(ValidationException):
+ setting.parse('cat')
+
+ def test_map_setting_valid_default(self):
+ setting = MapSetting(3, map={'dog': 1, 'bat': 2, 'cat': 3})
+ self.assertEquals(setting.get_value(), 3)
+
+ def test_map_setting_valid_choice(self):
+ setting = MapSetting(3, map={'dog': 1, 'bat': 2, 'cat': 3})
+ self.assertEquals(setting.get_value(), 3)
+ setting.parse('bat')
+ self.assertEquals(setting.get_value(), 2)
+
+ def test_enum_setting_invalid_initialization(self):
+ with self.assertRaises(MissingArgumentException):
+ setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
+
+ # enum settings
+ def test_enum_setting_invalid_initialization(self):
+ with self.assertRaises(MissingArgumentException):
+ setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
+
+ def test_enum_setting_invalid_default_value(self):
+ with self.assertRaises(ValidationException):
+ setting = EnumStringSetting(3, choices=[0, 1, 2])
+
+ def test_enum_setting_invalid_choice(self):
+ setting = EnumStringSetting(0, choices=[0, 1, 2])
+ with self.assertRaises(ValidationException):
+ setting.parse(3)
+
+ def test_enum_setting_valid_default(self):
+ setting = EnumStringSetting(3, choices=[1, 2, 3])
+ self.assertEquals(setting.get_value(), 3)
+
+ def test_enum_setting_valid_choice(self):
+ setting = EnumStringSetting(3, choices=[1, 2, 3])
+ self.assertEquals(setting.get_value(), 3)
+ setting.parse(2)
+ self.assertEquals(setting.get_value(), 2)
+
+ # multiple choice settings
+ def test_multiple_setting_invalid_initialization(self):
+ with self.assertRaises(MissingArgumentException):
+ setting = MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
+
+ def test_multiple_setting_invalid_default_value(self):
+ with self.assertRaises(ValidationException):
+ setting = MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
+
+ def test_multiple_setting_invalid_choice(self):
+ setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
+ with self.assertRaises(ValidationException):
+ setting.parse('4, 3')
+
+ def test_multiple_setting_valid_default(self):
+ setting = MultipleChoiceSetting(['3'], choices=['1', '2', '3'])
+ self.assertEquals(setting.get_value(), ['3'])
+
+ def test_multiple_setting_valid_choice(self):
+ setting = MultipleChoiceSetting(['3'], choices=['1', '2', '3'])
+ self.assertEquals(setting.get_value(), ['3'])
+ setting.parse('2')
+ self.assertEquals(setting.get_value(), ['2'])
+
+ # plugins settings
+ def test_plugins_setting_all_default_enabled(self):
+ plugin1 = PluginStub('plugin1', True)
+ plugin2 = PluginStub('plugin2', True)
+ setting = PluginsSetting(['3'], choices=[plugin1, plugin2])
+ self.assertEquals(setting.get_enabled(), set(['plugin1', 'plugin2']))
+
+ def test_plugins_setting_few_default_enabled(self):
+ plugin1 = PluginStub('plugin1', True)
+ plugin2 = PluginStub('plugin2', False)
+ plugin3 = PluginStub('plugin3', True)
+ setting = PluginsSetting('name', choices=[plugin1, plugin2, plugin3])
+ self.assertEquals(setting.get_enabled(), set(['plugin1', 'plugin3']))
diff --git a/tests/unit/test_webapp.py b/tests/unit/test_webapp.py
index 071c01df3..5697017d9 100644
--- a/tests/unit/test_webapp.py
+++ b/tests/unit/test_webapp.py
@@ -12,7 +12,6 @@ class ViewsTestCase(SearxTestCase):
def setUp(self):
webapp.app.config['TESTING'] = True # to get better error messages
self.app = webapp.app.test_client()
- webapp.default_theme = 'default'
# set some defaults
self.test_results = [
@@ -43,6 +42,11 @@ class ViewsTestCase(SearxTestCase):
webapp.Search.search = search_mock
+ def get_current_theme_name_mock(override=None):
+ return 'default'
+
+ webapp.get_current_theme_name = get_current_theme_name_mock
+
self.maxDiff = None # to see full diffs
def test_index_empty(self):