|
@@ -0,0 +1,552 @@
|
|
1
|
+"""
|
|
2
|
+searx is free software: you can redistribute it and/or modify
|
|
3
|
+it under the terms of the GNU Affero General Public License as published by
|
|
4
|
+the Free Software Foundation, either version 3 of the License, or
|
|
5
|
+(at your option) any later version.
|
|
6
|
+
|
|
7
|
+searx is distributed in the hope that it will be useful,
|
|
8
|
+but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
9
|
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
10
|
+GNU Affero General Public License for more details.
|
|
11
|
+
|
|
12
|
+You should have received a copy of the GNU Affero General Public License
|
|
13
|
+along with searx. If not, see < http://www.gnu.org/licenses/ >.
|
|
14
|
+
|
|
15
|
+(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
|
|
16
|
+"""
|
|
17
|
+
|
|
18
|
+import threading
|
|
19
|
+import re
|
|
20
|
+import searx.poolrequests as requests_lib
|
|
21
|
+from itertools import izip_longest, chain
|
|
22
|
+from operator import itemgetter
|
|
23
|
+from Queue import Queue
|
|
24
|
+from time import time
|
|
25
|
+from urlparse import urlparse, unquote
|
|
26
|
+from searx import settings
|
|
27
|
+from searx.engines import engines
|
|
28
|
+
|
|
29
|
+from searx.utils import gen_useragent, prettify_url, highlight_content, html_to_text
|
|
30
|
+from searx.plugins import plugins
|
|
31
|
+from searx.query import Query
|
|
32
|
+from searx import logger
|
|
33
|
+
|
|
34
|
# module-scoped logger, namespaced under the application root logger
logger = logger.getChild('search')

# global counter of searches served since process start (incremented in
# Search.search); plain int increment -- NOTE(review): not synchronized,
# presumably acceptable because it is statistics-only -- confirm
number_of_searches = 0
|
|
37
|
+
|
|
38
|
+
|
|
39
|
def search_request_wrapper(fn, url, engine_name, **kwargs):
    """Call ``fn(url, **kwargs)`` and shield the worker thread from errors.

    On success the response returned by *fn* is passed through.  On any
    failure the engine's error counter is incremented, the exception is
    logged with its traceback, and ``None`` is returned so the thread
    terminates cleanly.
    """
    try:
        return fn(url, **kwargs)
    # was a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; Exception is wide enough for request failures
    except Exception:
        # increase errors stats
        engines[engine_name].stats['errors'] += 1

        # print engine name and specific error message
        logger.exception('engine crash: {0}'.format(engine_name))
        return None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
def threaded_requests(requests):
    """Fire every engine request in its own thread and wait for all.

    Each entry of *requests* is ``(fn, url, request_args, engine_name)``.
    All threads share the slowest engine's timeout; any thread still
    alive once that shared deadline has passed is logged as a timeout.
    """
    # give every request the largest configured timeout so no engine is
    # cut short before the overall deadline
    timeout_limit = max(req[2]['timeout'] for req in requests)
    search_start = time()

    for fn, url, request_args, engine_name in requests:
        request_args['timeout'] = timeout_limit
        worker = threading.Thread(
            target=search_request_wrapper,
            args=(fn, url, engine_name),
            kwargs=request_args,
            name='search_request',
        )
        # stash the engine name on the thread so the join loop below can
        # report which engine timed out
        worker._engine_name = engine_name
        worker.start()

    # join each worker, but never wait beyond the shared deadline
    for worker in threading.enumerate():
        if worker.name != 'search_request':
            continue
        remaining_time = max(0.0, timeout_limit - (time() - search_start))
        worker.join(remaining_time)
        if worker.isAlive():
            logger.warning('engine timeout: {0}'.format(worker._engine_name))
|
|
71
|
+
|
|
72
|
+
|
|
73
|
# get default request parameters
def default_request_params():
    """Return a fresh dict of baseline HTTP request parameters.

    A new dict (with new nested dicts) is built on every call so that
    per-engine mutation cannot leak between requests.
    """
    return dict(
        method='GET',
        headers={},
        data={},
        url='',
        cookies={},
        verify=True,
    )
|
|
83
|
+
|
|
84
|
+
|
|
85
|
# create a callback wrapper for the search engine results
def make_callback(engine_name, results_queue, callback, params):
    """Build the per-engine response hook for the requests layer.

    The returned ``process_callback`` parses the HTTP response via
    *callback* (the engine module's ``response`` function), tags every
    result dict with the engine name and pushes
    ``(engine_name, results)`` onto *results_queue*.  Engine statistics
    (page load time, errors) are updated as a side effect.
    """
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        # check if redirect comparing to the True value,
        # because resp can be a Mock object, and any attribute name
        # returns something truthy.
        if response.is_redirect is True:
            logger.debug('{0} redirect on: {1}'.format(engine_name, response))
            return

        # expose the request parameters to the engine's response parser
        response.search_params = params

        # responses arriving after the engine's own timeout plus a small
        # grace period are counted as errors and discarded
        timeout_overhead = 0.2  # seconds
        search_duration = time() - params['started']
        timeout_limit = engines[engine_name].timeout + timeout_overhead
        if search_duration > timeout_limit:
            engines[engine_name].stats['page_load_time'] += timeout_limit
            engines[engine_name].stats['errors'] += 1
            return

        # callback: let the engine module turn the raw response into results
        search_results = callback(response)

        # tag each result with the engine that produced it
        for result in search_results:
            result['engine'] = engine_name

        results_queue.put_nowait((engine_name, search_results))

        # update stats with current page-load-time
        engines[engine_name].stats['page_load_time'] += search_duration

    return process_callback
|
|
118
|
+
|
|
119
|
+
|
|
120
|
# return the meaningful length of the content for a result
def content_result_len(content):
    """Length of *content* with punctuation and whitespace stripped.

    Non-string input (e.g. ``None``) counts as zero.
    """
    if isinstance(content, basestring):
        # NOTE: the dash is escaped -- the previous pattern ended in
        # ')-_' which the regex engine parsed as the character RANGE
        # ')' .. '_' (covering all digits and uppercase letters), so
        # meaningful characters were stripped from the count
        content = re.sub(r'[,;:!?\./\\ ()\-_]', '', content)
        return len(content)
    else:
        return 0
|
|
127
|
+
|
|
128
|
+
|
|
129
|
# score results and remove duplications
def score_results(results):
    """Merge per-engine result lists into one scored, grouped list.

    *results* maps engine name -> list of result dicts.  Pass 1
    interleaves the lists round-robin, scores each result by position
    and engine weight, and merges duplicates (same host + path + query
    + template).  Pass 2 reorders the score-sorted list so results of
    the same category/template cluster together.
    """
    # calculate scoring parameters: interleave the engine lists
    # round-robin (izip_longest pads with None, filter drops the pads);
    # Python 2 filter() returns a list, so len() below is valid
    flat_res = filter(
        None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)

    results = []

    # pass 1: deduplication + scoring
    for i, res in enumerate(flat_res):

        res['parsed_url'] = urlparse(res['url'])

        res['host'] = res['parsed_url'].netloc

        # normalize host: drop a single leading 'www.' for comparison
        if res['host'].startswith('www.'):
            res['host'] = res['host'].replace('www.', '', 1)

        res['engines'] = [res['engine']]

        weight = 1.0

        # strip multiple spaces and carriage returns from content
        if res.get('content'):
            res['content'] = re.sub(' +', ' ',
                                    res['content'].strip().replace('\n', ''))

        # get weight of this engine if possible
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)

        # calculate score for that engine: earlier positions score
        # higher; +1 keeps every result's score positive
        score = int((flat_len - i) / engines_len) * weight + 1

        # check for duplicates among already-accepted results
        duplicated = False
        for new_res in results:
            # remove / from the end of the url if required
            p1 = res['parsed_url'].path[:-1] \
                if res['parsed_url'].path.endswith('/') \
                else res['parsed_url'].path
            p2 = new_res['parsed_url'].path[:-1] \
                if new_res['parsed_url'].path.endswith('/') \
                else new_res['parsed_url'].path

            # check if that result is a duplicate
            if res['host'] == new_res['host'] and unquote(p1) == unquote(p2) \
                and res['parsed_url'].query == new_res['parsed_url'].query \
                and res.get('template') == new_res.get('template'):
                duplicated = new_res
                break

        # merge duplicates together
        if duplicated:
            # using content with more text
            if content_result_len(res.get('content', '')) > \
                    content_result_len(duplicated.get('content', '')):
                duplicated['content'] = res['content']

            # increase result-score
            duplicated['score'] += score

            # add engine to list of result-engines
            duplicated['engines'].append(res['engine'])

            # using https if possible
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']

        # if there is no duplicate found, append result
        else:
            res['score'] = score
            # if the result has no scheme, use http as default
            if res['parsed_url'].scheme == '':
                res['parsed_url'] = res['parsed_url']._replace(scheme="http")

            results.append(res)

    results = sorted(results, key=itemgetter('score'), reverse=True)

    # pass 2 : group results by category and template
    gresults = []
    categoryPositions = {}

    for i, res in enumerate(results):
        # FIXME : handle more than one category per engine
        # NOTE(review): conditional-expression precedence makes this
        # evaluate as (categories[0] + ':' + '') when no template, else
        # just res['template'] -- the 'category:' prefix is dropped for
        # templated results; presumably the intent was
        # categories[0] + ':' + (template or '') -- confirm before fixing
        category = engines[res['engine']].categories[0] + ':' + '' \
            if 'template' not in res \
            else res['template']

        current = None if category not in categoryPositions \
            else categoryPositions[category]

        # group with previous results using the same category
        # if the group can accept more result and is not too far
        # from the current position
        if current is not None and (current['count'] > 0) \
                and (len(gresults) - current['index'] < 20):
            # group with the previous results using
            # the same category with this one
            index = current['index']
            gresults.insert(index, res)

            # update every index after the current one
            # (including the current one)
            for k in categoryPositions:
                v = categoryPositions[k]['index']
                if v >= index:
                    categoryPositions[k]['index'] = v + 1

            # update this category: one fewer slot left in the group
            current['count'] -= 1

        else:
            # start (or restart) a group for this category at the end
            gresults.append(res)

            # update categoryIndex: up to 8 more results may join here
            categoryPositions[category] = {'index': len(gresults), 'count': 8}

    # return the grouped results
    return gresults
|
|
256
|
+
|
|
257
|
+
|
|
258
|
def merge_two_infoboxes(infobox1, infobox2):
    """Merge *infobox2* into *infobox1* in place.

    URLs are appended unless an entry with the same ``'url'`` value is
    already present; attributes are appended unless an entry with the
    same ``'label'`` is already present; the longer of the two contents
    wins.
    """
    if 'urls' in infobox2:
        urls1 = infobox1.get('urls', None)
        if urls1 is None:
            urls1 = []
            # was infobox1.set(...) -- dicts have no .set() method, so
            # merging into an infobox without urls raised AttributeError
            infobox1['urls'] = urls1

        # urls already known to infobox1, keyed by their 'url' value
        urlSet = set()
        for url in infobox1.get('urls', []):
            urlSet.add(url.get('url', None))

        for url in infobox2.get('urls', []):
            if url.get('url', None) not in urlSet:
                urls1.append(url)

    if 'attributes' in infobox2:
        attributes1 = infobox1.get('attributes', None)
        if attributes1 is None:
            attributes1 = []
            # was infobox1.set(...) -- same AttributeError as above
            infobox1['attributes'] = attributes1

        # labels already known to infobox1
        attributeSet = set()
        for attribute in infobox1.get('attributes', []):
            if attribute.get('label', None) not in attributeSet:
                attributeSet.add(attribute.get('label', None))

        for attribute in infobox2.get('attributes', []):
            # the label set was built but never consulted before, so
            # duplicate attributes were appended unconditionally
            if attribute.get('label', None) not in attributeSet:
                attributes1.append(attribute)

    if 'content' in infobox2:
        content1 = infobox1.get('content', None)
        content2 = infobox2.get('content', '')
        if content1 is not None:
            # keep the content with the higher meaningful length
            if content_result_len(content2) > content_result_len(content1):
                infobox1['content'] = content2
        else:
            # was infobox1.set(...) -- same AttributeError as above
            infobox1['content'] = content2
|
|
295
|
+
|
|
296
|
+
|
|
297
|
def merge_infoboxes(infoboxes):
    """Collapse infoboxes sharing the same ``'id'`` into single entries.

    First-appearance order is preserved; boxes whose id is missing are
    never merged into each other (the id lookup is skipped for them).
    """
    merged = []
    index_by_id = {}

    for box in infoboxes:
        box_id = box.get('id', None)

        # merge into the earlier box when one with the same id exists
        if box_id is not None and index_by_id.get(box_id, None) is not None:
            merge_two_infoboxes(merged[index_by_id[box_id]], box)
        else:
            merged.append(box)
            index_by_id[box_id] = len(merged) - 1

    return merged
|
|
314
|
+
|
|
315
|
+
|
|
316
|
class Search(object):
    """Search information container.

    ``__init__`` derives the query, language, plugin, category and
    engine selection from a *task* dict; ``search`` fires the engine
    requests, merges the results and decorates them for display.
    """

    def __init__(self, task):
        """Initialise the container from *task*.

        *task* carries ``query``, ``pageno`` and ``settings`` (with
        ``engines`` and optional ``language``/``plugins``), plus an
        optional ``selected_categories`` list.
        """
        # init vars
        self.query = None
        self.engines = []
        self.plugins = []
        self.categories = []
        self.paging = False
        self.pageno = 1
        self.lang = 'all'

        # set blocked engines
        self.blocked_engines = []  # get_blocked_engines(engines, request.cookies)

        self.results = []
        self.suggestions = []
        self.answers = []
        self.infoboxes = []
        self.request_data = {}

        # set specific language if set
        if 'language' in task['settings']:
            self.lang = task['settings']['language']

        # only keep the plugins the task explicitly allows
        if 'plugins' in task['settings']:
            for plugin in task['settings']['plugins']:
                if plugin['allow']:
                    self.plugins.append(plugin)

        if task['pageno']:
            self.pageno = int(task['pageno'])

        # parse query, if tags are set, which change
        # the search engine or search-language
        query_obj = Query(str(task['query']), self.blocked_engines)
        query_obj.parse_query()

        # set query
        self.query = query_obj.getSearchQuery()

        # get last selected language in query, if possible
        # TODO support search with multiple languages
        if len(query_obj.languages):
            self.lang = query_obj.languages[-1]

        self.engines = query_obj.engines

        self.categories = []

        # if engines are calculated from query,
        # set categories by using that information
        if self.engines and query_obj.specific:
            self.categories = list(set(engine['category']
                                       for engine in self.engines))

        # otherwise, use the defined categories to
        # calculate which engines should be used
        else:
            if 'selected_categories' in task and task['selected_categories']:
                self.categories = task['selected_categories']

            # if still no category is specified, use general
            # as default-category
            if not self.categories:
                self.categories = ['general']

            # add every enabled engine serving one of the selected
            # categories
            for engine in task['settings']['engines']:
                if not engine['disabled']:
                    for categ in engine['categories']:
                        if categ in self.categories:
                            self.engines.append({'category': categ,
                                                 'name': engine['name']})

    # do search-request
    def search(self, task):
        """Run the search described by *task* and return ``self``.

        Sends one threaded request per selected engine, drains the
        result queue, splits suggestions/answers/infoboxes out of the
        plain results, scores and deduplicates the remainder, and
        decorates every result with display fields (pretty url,
        highlighted content/title, publication date).
        """
        global number_of_searches

        # init vars
        requests = []
        results_queue = Queue()
        results = {}

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # start search-request for all selected engines
        for selected_engine in self.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # if paging is not supported, skip
            if self.pageno > 1 and not engine.paging:
                continue

            # if search-language is set and engine does not
            # provide language-support, skip
            if self.lang != 'all' and not engine.language_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = time()
            request_params['pageno'] = self.pageno

            # engine-pinned language wins over the query language
            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.lang

            # try:
            #     0 = None, 1 = Moderate, 2 = Strict
            #     request_params['safesearch'] = int(request.cookies.get('safesearch'))
            # except Exception:
            request_params['safesearch'] = settings['search']['safe_search']

            # update request parameters dependent on
            # search-engine (contained in engines folder)
            engine.request(task['query'].encode('utf-8'), request_params)

            if request_params['url'] is None:
                # TODO add support of offline engines
                pass

            # create a callback wrapper for the search engine results
            callback = make_callback(
                selected_engine['name'],
                results_queue,
                engine.response,
                request_params)

            # create dictionary which contains all
            # information about the request
            request_args = dict(
                headers=request_params['headers'],
                hooks=dict(response=callback),
                cookies=request_params['cookies'],
                timeout=engine.timeout,
                verify=request_params['verify']
            )

            # specific type of request (GET or POST)
            if request_params['method'] == 'GET':
                req = requests_lib.get
            else:
                req = requests_lib.post
                request_args['data'] = request_params['data']

            # ignoring empty urls
            if not request_params['url']:
                continue

            # append request to list
            requests.append((req, request_params['url'],
                             request_args,
                             selected_engine['name']))

        if not requests:
            return self
        # send all search-requests
        threaded_requests(requests)

        # drain the queue; split suggestions, answers and infoboxes out
        # of each engine's result list (plain partition loop instead of
        # the previous side-effecting list comprehensions with O(n^2)
        # list.remove calls)
        while not results_queue.empty():
            engine_name, engine_results = results_queue.get_nowait()

            # TODO type checks
            remaining = []
            for x in engine_results:
                if 'suggestion' in x:
                    self.suggestions.append(x['suggestion'])
                elif 'answer' in x:
                    self.answers.append(x['answer'])
                elif 'infobox' in x:
                    self.infoboxes.append(x)
                else:
                    remaining.append(x)

            results[engine_name] = remaining

        # update engine-specific stats
        for engine_name, engine_results in results.items():
            engines[engine_name].stats['search_count'] += 1
            engines[engine_name].stats['result_count'] += len(engine_results)

        # score results and remove duplications
        self.results = score_results(results)

        # merge infoboxes according to their ids
        self.infoboxes = merge_infoboxes(self.infoboxes)

        # update engine stats, using calculated score
        for result in self.results:
            plugins.callAPI('on_result', self.plugins, locals())

            # credit the score to every engine that contributed to this
            # (possibly merged) result -- the previous code looped over
            # result['engines'] but always credited the primary engine
            # (engines[result['engine']]), once per merged engine
            for res_engine in result['engines']:
                engines[res_engine].stats['score_count'] += result['score']

            result['pretty_url'] = prettify_url(result['url'])

            # TODO, check if timezone is calculated right
            if 'publishedDate' in result:
                result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')

            # enable paging in the UI as soon as one engine supports it
            if not self.paging and engines[result['engine']].paging:
                self.paging = True

            if 'content' in result:
                result['content_html'] = highlight_content(result['content'],
                                                           self.query.encode('utf-8'))  # noqa
                result['title_html'] = highlight_content(result['title'],
                                                         self.query.encode('utf-8'))

            if result.get('content'):
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title']).strip().split())

        # return results, suggestions, answers and infoboxes
        return self
|