summaryrefslogtreecommitdiff
path: root/searx/engines
diff options
context:
space:
mode:
authorCqoicebordel <Cqoicebordel@users.noreply.github.com>2015-06-01 00:00:32 +0200
committerCqoicebordel <Cqoicebordel@users.noreply.github.com>2015-06-01 00:00:32 +0200
commit884eeb8541e0a4cf3d65c2a17e1c2f788cab7fb1 (patch)
treea3179b4a58b34bde03e2a140c5511567008af3a9 /searx/engines
parentf965c978222cf48e8dd4b7dd6c9a28ccca9bc62f (diff)
New Qwant engines
- Web - Images - News - Social media
Diffstat (limited to 'searx/engines')
-rw-r--r--searx/engines/qwant.py66
-rw-r--r--searx/engines/qwant_images.py70
-rw-r--r--searx/engines/qwant_news.py69
-rw-r--r--searx/engines/qwant_social.py69
4 files changed, 274 insertions, 0 deletions
diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
new file mode 100644
index 000000000..91c12a19e
--- /dev/null
+++ b/searx/engines/qwant.py
@@ -0,0 +1,66 @@
+"""
+ Qwant (Web)
+
+ @website https://qwant.com/
+ @provide-api not officially (https://api.qwant.com/api/search/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content
+"""
+
+from urllib import urlencode
+from json import loads
+
+# engine dependent config (searx category, pagination, per-language search)
+categories = ['general']
+paging = True
+language_support = True
+
+# search-url template; {offset} and {query} are filled in by request()
+url = 'https://api.qwant.com/api/search/web?count=10&offset={offset}&f=&{query}'
+
+
+# do search-request: build the Qwant web-API URL for this query and page
+def request(query, params):
+    offset = (params['pageno'] - 1) * 10
+
+    params['url'] = url.format(query=urlencode({'q': query}),
+                               offset=offset)
+
+    # add a locale parameter only when a specific language is requested
+    if params['language'] != 'all':
+        params['url'] += '&locale=' + params['language'].lower()
+
+    return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+
+ search_results = loads(resp.text)
+
+ # return empty array if there are no results
+ if 'data' not in search_results:
+ return []
+
+ data = search_results.get('data', {})
+
+ res = data.get('result', {})
+
+ # parse results
+ for result in res.get('items', {}):
+
+ title = result['title']
+ res_url = result['url']
+ content = result['desc']
+
+ # append result
+ results.append({'title': title,
+ 'content': content,
+ 'url': res_url})
+
+ # return results
+ return results
diff --git a/searx/engines/qwant_images.py b/searx/engines/qwant_images.py
new file mode 100644
index 000000000..1c1753389
--- /dev/null
+++ b/searx/engines/qwant_images.py
@@ -0,0 +1,70 @@
+"""
+ Qwant (Images)
+
+ @website https://qwant.com/
+ @provide-api not officially (https://api.qwant.com/api/search/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse       url, title, thumbnail, img_src
+"""
+
+from urllib import urlencode
+from json import loads
+
+# engine dependent config (searx category, pagination, per-language search)
+categories = ['images']
+paging = True
+language_support = True
+
+# search-url template; {offset} and {query} are filled in by request()
+url = 'https://api.qwant.com/api/search/images?count=10&offset={offset}&f=&{query}'
+
+
+# do search-request: build the Qwant images-API URL for this query and page
+def request(query, params):
+    offset = (params['pageno'] - 1) * 10
+
+    params['url'] = url.format(query=urlencode({'q': query}),
+                               offset=offset)
+
+    # add a locale parameter only when a specific language is requested
+    if params['language'] != 'all':
+        params['url'] += '&locale=' + params['language'].lower()
+
+    return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+
+ search_results = loads(resp.text)
+
+ # return empty array if there are no results
+ if 'data' not in search_results:
+ return []
+
+ data = search_results.get('data', {})
+
+ res = data.get('result', {})
+
+ # parse results
+ for result in res.get('items', {}):
+
+ title = result['title']
+ res_url = result['url']
+ thumbnail_src = result['thumbnail']
+ img_src = result['media']
+
+ # append result
+ results.append({'template': 'images.html',
+ 'url': res_url,
+ 'title': title,
+ 'content': '',
+ 'thumbnail_src': thumbnail_src,
+ 'img_src': img_src})
+
+ # return results
+ return results
diff --git a/searx/engines/qwant_news.py b/searx/engines/qwant_news.py
new file mode 100644
index 000000000..c4d5be5d3
--- /dev/null
+++ b/searx/engines/qwant_news.py
@@ -0,0 +1,69 @@
+"""
+ Qwant (News)
+
+ @website https://qwant.com/
+ @provide-api not officially (https://api.qwant.com/api/search/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse       url, title, content, publishedDate
+"""
+
+from urllib import urlencode
+from json import loads
+from datetime import datetime
+
+# engine dependent config (searx category, pagination, per-language search)
+categories = ['news']
+paging = True
+language_support = True
+
+# search-url template; {offset} and {query} are filled in by request()
+url = 'https://api.qwant.com/api/search/news?count=10&offset={offset}&f=&{query}'
+
+
+# do search-request: build the Qwant news-API URL for this query and page
+def request(query, params):
+    offset = (params['pageno'] - 1) * 10
+
+    params['url'] = url.format(query=urlencode({'q': query}),
+                               offset=offset)
+
+    # add a locale parameter only when a specific language is requested
+    if params['language'] != 'all':
+        params['url'] += '&locale=' + params['language'].lower()
+
+    return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+
+ search_results = loads(resp.text)
+
+ # return empty array if there are no results
+ if 'data' not in search_results:
+ return []
+
+ data = search_results.get('data', {})
+
+ res = data.get('result', {})
+
+ # parse results
+ for result in res.get('items', {}):
+
+ title = result['title']
+ res_url = result['url']
+ content = result['desc']
+ published_date = datetime.fromtimestamp(result['date'], None)
+
+ # append result
+ results.append({'url': res_url,
+ 'title': title,
+ 'publishedDate': published_date,
+ 'content': content})
+
+ # return results
+ return results
diff --git a/searx/engines/qwant_social.py b/searx/engines/qwant_social.py
new file mode 100644
index 000000000..474dfac02
--- /dev/null
+++ b/searx/engines/qwant_social.py
@@ -0,0 +1,69 @@
+"""
+ Qwant (social media)
+
+ @website https://qwant.com/
+ @provide-api not officially (https://api.qwant.com/api/search/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse       url, title, content, publishedDate
+"""
+
+from urllib import urlencode
+from json import loads
+from datetime import datetime
+
+# engine dependent config (searx category, pagination, per-language search)
+categories = ['social media']
+paging = True
+language_support = True
+
+# search-url template; {offset} and {query} are filled in by request()
+url = 'https://api.qwant.com/api/search/social?count=10&offset={offset}&f=&{query}'
+
+
+# do search-request: build the Qwant social-API URL for this query and page
+def request(query, params):
+    offset = (params['pageno'] - 1) * 10
+
+    params['url'] = url.format(query=urlencode({'q': query}),
+                               offset=offset)
+
+    # add a locale parameter only when a specific language is requested
+    if params['language'] != 'all':
+        params['url'] += '&locale=' + params['language'].lower()
+
+    return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+
+ search_results = loads(resp.text)
+
+ # return empty array if there are no results
+ if 'data' not in search_results:
+ return []
+
+ data = search_results.get('data', {})
+
+ res = data.get('result', {})
+
+ # parse results
+ for result in res.get('items', {}):
+
+ title = result['title']
+ res_url = result['url']
+ content = result['desc']
+ published_date = datetime.fromtimestamp(result['date'], None)
+
+ # append result
+ results.append({'url': res_url,
+ 'title': title,
+ 'content': content,
+ 'publishedDate': published_date})
+
+ # return results
+ return results