@@ -36,14 +36,53 @@ logger = logger.getChild('search')
 number_of_searches = 0
 
 
-def search_request_wrapper(fn, url, engine_name, **kwargs):
-    ret = None
-    engine = engines[engine_name]
+def send_http_request(engine, request_params, timeout_limit):
+    response = None
     try:
-        ret = fn(url, **kwargs)
+        # create a dictionary which contains all
+        # information about the request
+        request_args = dict(
+            headers=request_params['headers'],
+            cookies=request_params['cookies'],
+            timeout=timeout_limit,
+            verify=request_params['verify']
+        )
+        # specific type of request (GET or POST)
+        if request_params['method'] == 'GET':
+            req = requests_lib.get
+        else:
+            req = requests_lib.post
+            request_args['data'] = request_params['data']
+
+        # for page_load_time stats
+        time_before_request = time()
+
+        # send the request
+        response = req(request_params['url'], **request_args)
+
         with threading.RLock():
+            # no error: reset the suspend variables
             engine.continuous_errors = 0
             engine.suspend_end_time = 0
+            # update stats with current page-load-time
+            # (only the HTTP request)
+            engine.stats['page_load_time'] += time() - time_before_request
+            engine.stats['page_load_count'] += 1
+
+        # is there a timeout? (no parsing in this case)
+        timeout_overhead = 0.2  # seconds
+        search_duration = time() - request_params['started']
+        if search_duration > timeout_limit + timeout_overhead:
+            logger.exception('engine timeout on HTTP request: '
+                             '{0} (search duration: {1} s, timeout: {2} s)'
+                             .format(engine.name, search_duration, timeout_limit))
+            with threading.RLock():
+                engine.stats['errors'] += 1
+            return False
+
+        # everything is ok: return the response
+        return response
+
     except:
         # increase errors stats
         with threading.RLock():
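For reference, the pattern `send_http_request` implements is: one shared kwargs dict, a GET/POST dispatch, and timing that covers only the HTTP round-trip. Below is a minimal standalone sketch of that pattern; it substitutes the plain `requests` package for searx's pooled `requests_lib` and leaves out the engine stats and suspend bookkeeping:

```python
import time
import requests

def send_http_request_sketch(request_params, timeout_limit):
    # arguments common to GET and POST
    request_args = dict(
        headers=request_params.get('headers', {}),
        cookies=request_params.get('cookies', {}),
        timeout=timeout_limit,
        verify=request_params.get('verify', True),
    )

    # pick the HTTP method; only POST carries a body
    if request_params['method'] == 'GET':
        req = requests.get
    else:
        req = requests.post
        request_args['data'] = request_params.get('data')

    # time only the HTTP round-trip, as the patch does for page_load_time
    before = time.time()
    response = req(request_params['url'], **request_args)
    page_load_time = time.time() - before
    return response, page_load_time
```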
@@ -52,20 +91,62 @@ def search_request_wrapper(fn, url, engine_name, **kwargs):
             engine.suspend_end_time = time() + min(60, engine.continuous_errors)
 
         # print engine name and specific error message
-        logger.exception('engine crash: {0}'.format(engine_name))
-    return ret
+        logger.exception('engine crash: {0}'.format(engine.name))
+        return False
+
+
+def search_one_request(engine_name, query, request_params, result_container, timeout_limit):
+    engine = engines[engine_name]
+
+    # update request parameters dependent on
+    # the search engine (contained in the engines folder)
+    engine.request(query, request_params)
+
+    # TODO add support of offline engines
+    if request_params['url'] is None:
+        return False
+
+    # ignoring empty urls
+    if not request_params['url']:
+        return False
+
+    # send request
+    response = send_http_request(engine, request_params, timeout_limit)
+
+    # parse response
+    success = None
+    if response:
+        # parse the response
+        response.search_params = request_params
+        search_results = engine.response(response)
+
+        # add results
+        for result in search_results:
+            result['engine'] = engine.name
+
+        result_container.extend(engine.name, search_results)
+
+        success = True
+    else:
+        success = False
+
+    with threading.RLock():
+        # update stats: total time
+        engine.stats['engine_time'] += time() - request_params['started']
+        engine.stats['engine_time_count'] += 1
 
+    return success
 
-def threaded_requests(requests):
-    timeout_limit = max(r[2]['timeout'] for r in requests)
-    search_start = time()
+
+def search_multiple_requests(requests, result_container, timeout_limit):
+    start_time = time()
     search_id = uuid4().__str__()
-    for fn, url, request_args, engine_name in requests:
-        request_args['timeout'] = timeout_limit
+
+    for engine_name, query, request_params in requests:
         th = threading.Thread(
-            target=search_request_wrapper,
-            args=(fn, url, engine_name),
-            kwargs=request_args,
+            target=search_one_request,
+            args=(engine_name, query, request_params, result_container, timeout_limit),
             name=search_id,
         )
         th._engine_name = engine_name
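The threading model introduced by `search_multiple_requests` is one thread per engine request, all named with the same per-search UUID so the join loop can find them again via `threading.enumerate()`. A reduced sketch of that fan-out, with a hypothetical `worker` callable standing in for `search_one_request`:

```python
import threading
from uuid import uuid4

def fan_out(requests_list, worker, result_container, timeout_limit):
    # one id per search, used as the thread name for every engine thread
    search_id = str(uuid4())
    for engine_name, query, request_params in requests_list:
        th = threading.Thread(
            target=worker,
            args=(engine_name, query, request_params, result_container, timeout_limit),
            name=search_id,
        )
        # stash the engine name so a timeout can be attributed later
        th._engine_name = engine_name
        th.start()
    return search_id
```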
@@ -73,7 +154,7 @@ def threaded_requests(requests):
 
     for th in threading.enumerate():
         if th.name == search_id:
-            remaining_time = max(0.0, timeout_limit - (time() - search_start))
+            remaining_time = max(0.0, timeout_limit - (time() - start_time))
             th.join(remaining_time)
             if th.isAlive():
                 logger.warning('engine timeout: {0}'.format(th._engine_name))
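The join loop above spends a single shared time budget: every `join` waits at most for what is left of `timeout_limit`, so the total wait is bounded no matter how many engine threads are running. The same idea in isolation (using the modern `is_alive()` spelling rather than the patch's Python 2-era `isAlive()`):

```python
import threading
import time

def join_with_budget(search_id, start_time, timeout_limit):
    for th in threading.enumerate():
        if th.name != search_id:
            continue
        # remaining share of the global budget; never negative
        remaining_time = max(0.0, timeout_limit - (time.time() - start_time))
        th.join(remaining_time)
        if th.is_alive():
            print('engine timeout: {0}'.format(th._engine_name))
```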
@@ -91,44 +172,6 @@ def default_request_params():
     }
 
 
-# create a callback wrapper for the search engine results
-def make_callback(engine_name, callback, params, result_container):
-
-    # creating a callback wrapper for the search engine results
-    def process_callback(response, **kwargs):
-        # check if redirect comparing to the True value,
-        # because resp can be a Mock object, and any attribut name returns something.
-        if response.is_redirect is True:
-            logger.debug('{0} redirect on: {1}'.format(engine_name, response))
-            return
-
-        response.search_params = params
-
-        search_duration = time() - params['started']
-        # update stats with current page-load-time
-        with threading.RLock():
-            engines[engine_name].stats['page_load_time'] += search_duration
-
-        timeout_overhead = 0.2  # seconds
-        timeout_limit = engines[engine_name].timeout + timeout_overhead
-
-        if search_duration > timeout_limit:
-            with threading.RLock():
-                engines[engine_name].stats['errors'] += 1
-            return
-
-        # callback
-        search_results = callback(response)
-
-        # add results
-        for result in search_results:
-            result['engine'] = engine_name
-
-        result_container.extend(engine_name, search_results)
-
-    return process_callback
-
-
 def get_search_query_from_webapp(preferences, form):
     query = None
     query_engines = []
@@ -255,6 +298,10 @@ class Search(object):
     def search(self):
         global number_of_searches
 
+        # start time
+        start_time = time()
+
+        # answers?
         answerers_results = ask(self.search_query)
 
         if answerers_results:
@@ -274,6 +321,9 @@ class Search(object):
 
         search_query = self.search_query
 
+        # max of all selected engine timeouts
+        timeout_limit = 0
+
         # start search-reqest for all selected engines
         for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
@@ -303,7 +353,7 @@ class Search(object):
             request_params = default_request_params()
             request_params['headers']['User-Agent'] = user_agent
             request_params['category'] = selected_engine['category']
-            request_params['started'] = time()
+            request_params['started'] = start_time
             request_params['pageno'] = search_query.pageno
 
             if hasattr(engine, 'language') and engine.language:
@@ -315,52 +365,16 @@ class Search(object):
             request_params['safesearch'] = search_query.safesearch
             request_params['time_range'] = search_query.time_range
 
-            # update request parameters dependent on
-            # search-engine (contained in engines folder)
-            engine.request(search_query.query.encode('utf-8'), request_params)
-
-            if request_params['url'] is None:
-                # TODO add support of offline engines
-                pass
-
-            # create a callback wrapper for the search engine results
-            callback = make_callback(
-                selected_engine['name'],
-                engine.response,
-                request_params,
-                self.result_container)
-
-            # create dictionary which contain all
-            # informations about the request
-            request_args = dict(
-                headers=request_params['headers'],
-                hooks=dict(response=callback),
-                cookies=request_params['cookies'],
-                timeout=engine.timeout,
-                verify=request_params['verify']
-            )
-
-            # specific type of request (GET or POST)
-            if request_params['method'] == 'GET':
-                req = requests_lib.get
-            else:
-                req = requests_lib.post
-                request_args['data'] = request_params['data']
-
-            # ignoring empty urls
-            if not request_params['url']:
-                continue
-
             # append request to list
-            requests.append((req, request_params['url'],
-                             request_args,
-                             selected_engine['name']))
+            requests.append((selected_engine['name'], search_query.query.encode('utf-8'), request_params))
 
-        if not requests:
-            return self.result_container
-        # send all search-request
-        threaded_requests(requests)
-        start_new_thread(gc.collect, tuple())
+            # update timeout_limit
+            timeout_limit = max(timeout_limit, engine.timeout)
+
+        if requests:
+            # send all search-request
+            search_multiple_requests(requests, self.result_container, timeout_limit - (time() - start_time))
+            start_new_thread(gc.collect, tuple())
 
         # return results, suggestions, answers and infoboxes
         return self.result_container
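The net effect of the timeout bookkeeping in this last hunk: `timeout_limit` becomes the slowest selected engine's timeout, and the budget actually handed to `search_multiple_requests` is that limit minus whatever time request preparation has already consumed. A worked sketch with hypothetical numbers:

```python
import time

start_time = time.time()
engine_timeouts = [2.0, 3.0, 5.0]  # hypothetical per-engine timeouts, in seconds

timeout_limit = 0
for t in engine_timeouts:
    timeout_limit = max(timeout_limit, t)  # ends up 5.0: the slowest engine wins

# suppose building the request_params took 0.05 s; the worker threads then
# get 5.0 - 0.05 = 4.95 s to complete their HTTP requests and parsing
remaining_budget = timeout_limit - (time.time() - start_time)
```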