diff options
| author | Cqoicebordel <Cqoicebordel@users.noreply.github.com> | 2015-06-02 20:36:58 +0200 |
|---|---|---|
| committer | Cqoicebordel <Cqoicebordel@users.noreply.github.com> | 2015-06-02 20:36:58 +0200 |
| commit | f05087b93ac1ebef3bdacd353524bac0d8041832 (patch) | |
| tree | 41d3d6901cc969f216c485811b7f19baf62212ff /searx/engines | |
| parent | 884eeb8541e0a4cf3d65c2a17e1c2f788cab7fb1 (diff) | |
Refactor
Use only one engine for the four searches from Qwant
Diffstat (limited to 'searx/engines')
| -rw-r--r-- | searx/engines/qwant.py | 38 | ||||
| -rw-r--r-- | searx/engines/qwant_images.py | 70 | ||||
| -rw-r--r-- | searx/engines/qwant_news.py | 69 | ||||
| -rw-r--r-- | searx/engines/qwant_social.py | 69 |
4 files changed, 30 insertions, 216 deletions
diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py index 91c12a19e..38bafb043 100644 --- a/searx/engines/qwant.py +++ b/searx/engines/qwant.py @@ -1,5 +1,5 @@ """ - Qwant (Web) + Qwant (Web, Images, News, Social) @website https://qwant.com/ @provide-api not officially (https://api.qwant.com/api/search/) @@ -12,21 +12,25 @@ from urllib import urlencode from json import loads +from datetime import datetime # engine dependent config -categories = ['general'] +categories = None paging = True language_support = True +search_url_keyword = None + # search-url -url = 'https://api.qwant.com/api/search/web?count=10&offset={offset}&f=&{query}' +url = 'https://api.qwant.com/api/search/{keyword}?count=10&offset={offset}&f=&{query}' # do search-request def request(query, params): offset = (params['pageno'] - 1) * 10 - params['url'] = url.format(query=urlencode({'q': query}), + params['url'] = url.format(keyword=search_url_keyword, + query=urlencode({'q': query}), offset=offset) # add language tag if specified @@ -57,10 +61,28 @@ def response(resp): res_url = result['url'] content = result['desc'] - # append result - results.append({'title': title, - 'content': content, - 'url': res_url}) + if search_url_keyword == 'web': + results.append({'title': title, + 'content': content, + 'url': res_url}) + + elif search_url_keyword == 'images': + thumbnail_src = result['thumbnail'] + img_src = result['media'] + results.append({'template': 'images.html', + 'url': res_url, + 'title': title, + 'content': '', + 'thumbnail_src': thumbnail_src, + 'img_src': img_src}) + + elif search_url_keyword == 'news' or search_url_keyword == 'social': + published_date = datetime.fromtimestamp(result['date'], None) + + results.append({'url': res_url, + 'title': title, + 'publishedDate': published_date, + 'content': content}) # return results return results diff --git a/searx/engines/qwant_images.py b/searx/engines/qwant_images.py deleted file mode 100644 index 1c1753389..000000000 --- 
a/searx/engines/qwant_images.py +++ /dev/null @@ -1,70 +0,0 @@ -""" - Qwant (Images) - - @website https://qwant.com/ - @provide-api not officially (https://api.qwant.com/api/search/) - - @using-api yes - @results JSON - @stable yes - @parse url, title, content -""" - -from urllib import urlencode -from json import loads - -# engine dependent config -categories = ['images'] -paging = True -language_support = True - -# search-url -url = 'https://api.qwant.com/api/search/images?count=10&offset={offset}&f=&{query}' - - -# do search-request -def request(query, params): - offset = (params['pageno'] - 1) * 10 - - params['url'] = url.format(query=urlencode({'q': query}), - offset=offset) - - # add language tag if specified - if params['language'] != 'all': - params['url'] += '&locale=' + params['language'].lower() - - return params - - -# get response from search-request -def response(resp): - results = [] - - search_results = loads(resp.text) - - # return empty array if there are no results - if 'data' not in search_results: - return [] - - data = search_results.get('data', {}) - - res = data.get('result', {}) - - # parse results - for result in res.get('items', {}): - - title = result['title'] - res_url = result['url'] - thumbnail_src = result['thumbnail'] - img_src = result['media'] - - # append result - results.append({'template': 'images.html', - 'url': res_url, - 'title': title, - 'content': '', - 'thumbnail_src': thumbnail_src, - 'img_src': img_src}) - - # return results - return results diff --git a/searx/engines/qwant_news.py b/searx/engines/qwant_news.py deleted file mode 100644 index c4d5be5d3..000000000 --- a/searx/engines/qwant_news.py +++ /dev/null @@ -1,69 +0,0 @@ -""" - Qwant (News) - - @website https://qwant.com/ - @provide-api not officially (https://api.qwant.com/api/search/) - - @using-api yes - @results JSON - @stable yes - @parse url, title, content -""" - -from urllib import urlencode -from json import loads -from datetime import datetime - -# engine 
dependent config -categories = ['news'] -paging = True -language_support = True - -# search-url -url = 'https://api.qwant.com/api/search/news?count=10&offset={offset}&f=&{query}' - - -# do search-request -def request(query, params): - offset = (params['pageno'] - 1) * 10 - - params['url'] = url.format(query=urlencode({'q': query}), - offset=offset) - - # add language tag if specified - if params['language'] != 'all': - params['url'] += '&locale=' + params['language'].lower() - - return params - - -# get response from search-request -def response(resp): - results = [] - - search_results = loads(resp.text) - - # return empty array if there are no results - if 'data' not in search_results: - return [] - - data = search_results.get('data', {}) - - res = data.get('result', {}) - - # parse results - for result in res.get('items', {}): - - title = result['title'] - res_url = result['url'] - content = result['desc'] - published_date = datetime.fromtimestamp(result['date'], None) - - # append result - results.append({'url': res_url, - 'title': title, - 'publishedDate': published_date, - 'content': content}) - - # return results - return results diff --git a/searx/engines/qwant_social.py b/searx/engines/qwant_social.py deleted file mode 100644 index 474dfac02..000000000 --- a/searx/engines/qwant_social.py +++ /dev/null @@ -1,69 +0,0 @@ -""" - Qwant (social media) - - @website https://qwant.com/ - @provide-api not officially (https://api.qwant.com/api/search/) - - @using-api yes - @results JSON - @stable yes - @parse url, title, content -""" - -from urllib import urlencode -from json import loads -from datetime import datetime - -# engine dependent config -categories = ['social media'] -paging = True -language_support = True - -# search-url -url = 'https://api.qwant.com/api/search/social?count=10&offset={offset}&f=&{query}' - - -# do search-request -def request(query, params): - offset = (params['pageno'] - 1) * 10 - - params['url'] = url.format(query=urlencode({'q': query}), 
- offset=offset) - - # add language tag if specified - if params['language'] != 'all': - params['url'] += '&locale=' + params['language'].lower() - - return params - - -# get response from search-request -def response(resp): - results = [] - - search_results = loads(resp.text) - - # return empty array if there are no results - if 'data' not in search_results: - return [] - - data = search_results.get('data', {}) - - res = data.get('result', {}) - - # parse results - for result in res.get('items', {}): - - title = result['title'] - res_url = result['url'] - content = result['desc'] - published_date = datetime.fromtimestamp(result['date'], None) - - # append result - results.append({'url': res_url, - 'title': title, - 'content': content, - 'publishedDate': published_date}) - - # return results - return results |