path: root/searx/engines/__init__.py
blob: 8339a13dcde564647f3d19dd9b5c8e5a36a90cea

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see <http://www.gnu.org/licenses/>.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
from os import listdir
from imp import load_source
import grequests
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from searx import settings

engine_dir = dirname(realpath(__file__))

# engine name -> loaded engine module
engines = {}

# category name -> list of engines serving that category
categories = {'general': []}

# load every engine module in this directory, skipping private files
# and modules blacklisted in the settings
for filename in listdir(engine_dir):
    if filename.startswith('_') or not filename.endswith('.py'):
        continue
    modname = splitext(filename)[0]
    if modname in settings.blacklist:
        continue
    filepath = join(engine_dir, filename)
    engine = load_source(modname, filepath)
    engine.name = modname
    # a module only qualifies as an engine if it implements the
    # request/response hook pair
    if not hasattr(engine, 'request') or not hasattr(engine, 'response'):
        continue
    engines[modname] = engine
    # engines that declare no categories fall back to 'general'
    if not hasattr(engine, 'categories'):
        categories['general'].append(engine)
    else:
        for category_name in engine.categories:
            categories.setdefault(category_name, []).append(engine)

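# For illustration only: a minimal engine module (hypothetical, not part
# of this tree) just has to provide the two hooks checked above.
# request() fills in the outgoing HTTP parameters, response() turns the
# raw response into result dicts; parse_results() below is a stand-in
# for whatever scraping a real engine does.
#
#     def request(query, params):
#         params['url'] = 'https://example.com/search?q=' + query
#         return params
#
#     def response(resp):
#         return [{'url': url, 'title': title, 'content': content}
#                 for url, title, content in parse_results(resp.text)]
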
def default_request_params():
    # baseline request parameters; each engine's request() hook fills
    # these in (url, method, payload, extra headers, ...)
    return {'method': 'GET',
            'headers': {},
            'data': {},
            'url': '',
            'cookies': {}}

def make_callback(engine_name, results, callback):
    # wrap an engine's response() hook so every result it yields is
    # tagged with the engine name and collected in the shared results dict
    def process_callback(response, **kwargs):
        cb_res = []
        for result in callback(response):
            result['engine'] = engine_name
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback

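# The closure above is registered as a grequests response hook (see
# search() below); once a request finishes, that engine's result list
# appears under results[engine_name]. A hypothetical wiring, assuming an
# engine module named `ddg`, would look like:
#
#     results = {}
#     cb = make_callback('ddg', results, ddg.response)
#     grequests.get(url, hooks=dict(response=cb))
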
def search(query, request, selected_categories):
    requests = []
    results = {}
    selected_engines = []
    user_agent = request.headers.get('User-Agent', '')
    if not selected_categories:
        selected_categories = ['general']
    # collect every engine belonging to the selected categories
    for categ in selected_categories:
        selected_engines.extend({'category': categ, 'name': x.name}
                                for x in categories[categ])
    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue
        engine = engines[selected_engine['name']]
        # forward the client's User-Agent and let the engine's request()
        # hook fill in url, method and payload
        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params = engine.request(query, request_params)
        callback = make_callback(selected_engine['name'], results, engine.response)
        if request_params['method'] == 'GET':
            req = grequests.get(request_params['url'],
                                headers=request_params['headers'],
                                hooks=dict(response=callback),
                                cookies=request_params['cookies'])
        else:
            req = grequests.post(request_params['url'],
                                 data=request_params['data'],
                                 headers=request_params['headers'],
                                 hooks=dict(response=callback),
                                 cookies=request_params['cookies'])
        requests.append(req)
    # fire all requests concurrently; the response hooks fill `results`
    grequests.map(requests)
    # interleave the per-engine lists round-robin and drop the None
    # padding izip_longest adds, so e.g. {'a': [a1, a2], 'b': [b1]}
    # flattens to [a1, b1, a2]
    flat_res = filter(None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    results = []
    # deduplication + scoring: earlier positions get higher base scores,
    # multiplied by the per-engine weight from the settings
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        score = (flat_len - i) * settings.weights.get(res['engine'], 1)
        duplicated = False
        # two results are duplicates if host, path and query string match
        for new_res in results:
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               res['parsed_url'].path == new_res['parsed_url'].path and\
               res['parsed_url'].query == new_res['parsed_url'].query:
                duplicated = new_res
                break
        if duplicated:
            # keep the longer content snippet, merge the scores and
            # remember every engine that returned this url
            if len(res.get('content', '')) > len(duplicated.get('content', '')):
                duplicated['content'] = res['content']
            duplicated['score'] += score
            duplicated['engine'] += ', ' + res['engine']
            # prefer the https variant when both schemes were returned;
            # ParseResult is immutable, so build a new one with _replace()
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['parsed_url'] = duplicated['parsed_url']._replace(scheme='https')
                duplicated['url'] = duplicated['parsed_url'].geturl()
        else:
            res['score'] = score
            results.append(res)

    return sorted(results, key=itemgetter('score'), reverse=True)
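
# Usage sketch (hypothetical caller; searx's Flask webapp passes its
# request object in, and `selected_categories` comes from the query form):
#
#     results = search('free software', flask_request, ['general'])
#     for res in results:
#         print res['score'], res['title'], res['url']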