
Merge pull request #751 from dalf/searchpy2

Simplify search.py
Adam Tauber, 8 years ago
commit ceb8ae6439

3 changed files with 154 additions and 131 deletions
1. searx/engines/__init__.py (+40 / -33)
2. searx/search.py (+112 / -98)
3. searx/webapp.py (+2 / -0)

searx/engines/__init__.py (+40 / -33)

@@ -90,6 +90,9 @@
         'result_count': 0,
         'search_count': 0,
         'page_load_time': 0,
+        'page_load_count': 0,
+        'engine_time': 0,
+        'engine_time_count': 0,
         'score_count': 0,
         'errors': 0
     }
@@ -106,32 +109,56 @@
     return engine
 
 
+def to_percentage(stats, maxvalue):
+    for engine_stat in stats:
+        if maxvalue:
+            engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
+        else:
+            engine_stat['percentage'] = 0
+    return stats
+
+
 def get_engines_stats():
     # TODO refactor
     pageloads = []
+    engine_times = []
     results = []
     scores = []
     errors = []
     scores_per_result = []
 
-    max_pageload = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
+    max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
     for engine in engines.values():
         if engine.stats['search_count'] == 0:
             continue
         results_num = \
             engine.stats['result_count'] / float(engine.stats['search_count'])
-        load_times = engine.stats['page_load_time'] / float(engine.stats['search_count'])  # noqa
+
+        if engine.stats['page_load_count'] != 0:
+            load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count'])  # noqa
+        else:
+            load_times = 0
+
+        if engine.stats['engine_time_count'] != 0:
+            this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count'])  # noqa
+        else:
+            this_engine_time = 0
+
         if results_num:
             score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
         else:
             score = score_per_result = 0.0
-        max_results = max(results_num, max_results)
+
         max_pageload = max(load_times, max_pageload)
+        max_engine_times = max(this_engine_time, max_engine_times)
+        max_results = max(results_num, max_results)
         max_score = max(score, max_score)
         max_score_per_result = max(score_per_result, max_score_per_result)
         max_errors = max(max_errors, engine.stats['errors'])
+
         pageloads.append({'avg': load_times, 'name': engine.name})
+        engine_times.append({'avg': this_engine_time, 'name': engine.name})
         results.append({'avg': results_num, 'name': engine.name})
         scores.append({'avg': score, 'name': engine.name})
         errors.append({'avg': engine.stats['errors'], 'name': engine.name})
@@ -140,39 +167,19 @@
             'name': engine.name
         })
 
-    for engine in pageloads:
-        if max_pageload:
-            engine['percentage'] = int(engine['avg'] / max_pageload * 100)
-        else:
-            engine['percentage'] = 0
-
-    for engine in results:
-        if max_results:
-            engine['percentage'] = int(engine['avg'] / max_results * 100)
-        else:
-            engine['percentage'] = 0
-
-    for engine in scores:
-        if max_score:
-            engine['percentage'] = int(engine['avg'] / max_score * 100)
-        else:
-            engine['percentage'] = 0
-
-    for engine in scores_per_result:
-        if max_score_per_result:
-            engine['percentage'] = int(engine['avg']
-                                       / max_score_per_result * 100)
-        else:
-            engine['percentage'] = 0
-
-    for engine in errors:
-        if max_errors:
-            engine['percentage'] = int(float(engine['avg']) / max_errors * 100)
-        else:
-            engine['percentage'] = 0
+    pageloads = to_percentage(pageloads, max_pageload)
+    engine_times = to_percentage(engine_times, max_engine_times)
+    results = to_percentage(results, max_results)
+    scores = to_percentage(scores, max_score)
+    scores_per_result = to_percentage(scores_per_result, max_score_per_result)
+    errors = to_percentage(errors, max_errors)
 
     return [
         (
+            gettext('Engine time (sec)'),
+            sorted(engine_times, key=itemgetter('avg'))
+        ),
+        (
             gettext('Page loads (sec)'),
             sorted(pageloads, key=itemgetter('avg'))
         ),
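The new to_percentage helper replaces five copies of the same normalization loop. A minimal sketch of its behavior with made-up numbers (the function body is copied from the diff above):

    def to_percentage(stats, maxvalue):
        for engine_stat in stats:
            if maxvalue:
                engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
            else:
                engine_stat['percentage'] = 0
        return stats

    engine_times = [{'avg': 0.5, 'name': 'engine-a'},
                    {'avg': 2.0, 'name': 'engine-b'}]
    print(to_percentage(engine_times, 2.0))
    # [{'avg': 0.5, 'name': 'engine-a', 'percentage': 25},
    #  {'avg': 2.0, 'name': 'engine-b', 'percentage': 100}]

Passing maxvalue=0 (no data collected yet) yields percentage 0 for every entry instead of dividing by zero.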

searx/search.py (+112 / -98)

@@ -36,14 +36,53 @@
 number_of_searches = 0
 
 
-def search_request_wrapper(fn, url, engine_name, **kwargs):
-    ret = None
-    engine = engines[engine_name]
+def send_http_request(engine, request_params, timeout_limit):
+    response = None
     try:
-        ret = fn(url, **kwargs)
+        # create a dictionary which contains all
+        # information about the request
+        request_args = dict(
+            headers=request_params['headers'],
+            cookies=request_params['cookies'],
+            timeout=timeout_limit,
+            verify=request_params['verify']
+        )
+        # specific type of request (GET or POST)
+        if request_params['method'] == 'GET':
+            req = requests_lib.get
+        else:
+            req = requests_lib.post
+            request_args['data'] = request_params['data']
+
+        # for page_load_time stats
+        time_before_request = time()
+
+        # send the request
+        response = req(request_params['url'], **request_args)
+
         with threading.RLock():
+            # no error: reset the suspend variables
             engine.continuous_errors = 0
             engine.suspend_end_time = 0
+            # update stats with the current page load time
+            # (covers only the HTTP request)
+            engine.stats['page_load_time'] += time() - time_before_request
+            engine.stats['page_load_count'] += 1
+
+        # was there a timeout? (no parsing in that case)
+        timeout_overhead = 0.2  # seconds
+        search_duration = time() - request_params['started']
+        if search_duration > timeout_limit + timeout_overhead:
+            logger.exception('engine timeout on HTTP request: '
+                             '{0} (search duration: {1} s, timeout: {2} s)'
+                             .format(engine.name, search_duration, timeout_limit))
+            with threading.RLock():
+                engine.stats['errors'] += 1
+            return False
+
+        # everything is ok: return the response
+        return response
+
     except:
         # increase errors stats
         with threading.RLock():
@@ -52,20 +91,62 @@
             engine.suspend_end_time = time() + min(60, engine.continuous_errors)
 
         # print engine name and specific error message
-        logger.exception('engine crash: {0}'.format(engine_name))
-    return ret
+        logger.exception('engine crash: {0}'.format(engine.name))
+        return False
+
+
+def search_one_request(engine_name, query, request_params, result_container, timeout_limit):
+    engine = engines[engine_name]
+
+    # update request parameters dependent on
+    # search-engine (contained in engines folder)
+    engine.request(query, request_params)
+
+    # TODO add support of offline engines
+    if request_params['url'] is None:
+        return False
+
+    # ignoring empty urls
+    if not request_params['url']:
+        return False
+
+    # send request
+    response = send_http_request(engine, request_params, timeout_limit)
+
+    # parse response
+    success = None
+    if response:
+        # parse the response
+        response.search_params = request_params
+        search_results = engine.response(response)
+
+        # add results
+        for result in search_results:
+            result['engine'] = engine.name
+
+        result_container.extend(engine.name, search_results)
+
+        success = True
+    else:
+        success = False
+
+    with threading.RLock():
+        # update stats: total time
+        engine.stats['engine_time'] += time() - request_params['started']
+        engine.stats['engine_time_count'] += 1
 
+    #
+    return success
 
-def threaded_requests(requests):
-    timeout_limit = max(r[2]['timeout'] for r in requests)
-    search_start = time()
+
+def search_multiple_requests(requests, result_container, timeout_limit):
+    start_time = time()
     search_id = uuid4().__str__()
-    for fn, url, request_args, engine_name in requests:
-        request_args['timeout'] = timeout_limit
+
+    for engine_name, query, request_params in requests:
         th = threading.Thread(
-            target=search_request_wrapper,
-            args=(fn, url, engine_name),
-            kwargs=request_args,
+            target=search_one_request,
+            args=(engine_name, query, request_params, result_container, timeout_limit),
             name=search_id,
         )
         th._engine_name = engine_name
@@ -73,7 +154,7 @@
 
     for th in threading.enumerate():
         if th.name == search_id:
-            remaining_time = max(0.0, timeout_limit - (time() - search_start))
+            remaining_time = max(0.0, timeout_limit - (time() - start_time))
             th.join(remaining_time)
             if th.isAlive():
                 logger.warning('engine timeout: {0}'.format(th._engine_name))
@@ -91,44 +172,6 @@
     }
 
 
-# create a callback wrapper for the search engine results
-def make_callback(engine_name, callback, params, result_container):
-
-    # creating a callback wrapper for the search engine results
-    def process_callback(response, **kwargs):
-        # check if redirect comparing to the True value,
-        # because resp can be a Mock object, and any attribut name returns something.
-        if response.is_redirect is True:
-            logger.debug('{0} redirect on: {1}'.format(engine_name, response))
-            return
-
-        response.search_params = params
-
-        search_duration = time() - params['started']
-        # update stats with current page-load-time
-        with threading.RLock():
-            engines[engine_name].stats['page_load_time'] += search_duration
-
-        timeout_overhead = 0.2  # seconds
-        timeout_limit = engines[engine_name].timeout + timeout_overhead
-
-        if search_duration > timeout_limit:
-            with threading.RLock():
-                engines[engine_name].stats['errors'] += 1
-            return
-
-        # callback
-        search_results = callback(response)
-
-        # add results
-        for result in search_results:
-            result['engine'] = engine_name
-
-        result_container.extend(engine_name, search_results)
-
-    return process_callback
-
-
 def get_search_query_from_webapp(preferences, form):
     query = None
     query_engines = []
@@ -255,6 +298,10 @@
     def search(self):
         global number_of_searches
 
+        # start time
+        start_time = time()
+
+        # answers?
         answerers_results = ask(self.search_query)
 
         if answerers_results:
@@ -274,6 +321,9 @@
 
         search_query = self.search_query
 
+        # max of all selected engine timeouts
+        timeout_limit = 0
+
         # start search-request for all selected engines
         for selected_engine in search_query.engines:
             if selected_engine['name'] not in engines:
@@ -303,7 +353,7 @@
             request_params = default_request_params()
             request_params['headers']['User-Agent'] = user_agent
             request_params['category'] = selected_engine['category']
-            request_params['started'] = time()
+            request_params['started'] = start_time
             request_params['pageno'] = search_query.pageno
 
             if hasattr(engine, 'language') and engine.language:
@@ -315,52 +365,16 @@
             request_params['safesearch'] = search_query.safesearch
             request_params['time_range'] = search_query.time_range
 
-            # update request parameters dependent on
-            # search-engine (contained in engines folder)
-            engine.request(search_query.query.encode('utf-8'), request_params)
-
-            if request_params['url'] is None:
-                # TODO add support of offline engines
-                pass
-
-            # create a callback wrapper for the search engine results
-            callback = make_callback(
-                selected_engine['name'],
-                engine.response,
-                request_params,
-                self.result_container)
-
-            # create dictionary which contain all
-            # informations about the request
-            request_args = dict(
-                headers=request_params['headers'],
-                hooks=dict(response=callback),
-                cookies=request_params['cookies'],
-                timeout=engine.timeout,
-                verify=request_params['verify']
-            )
-
-            # specific type of request (GET or POST)
-            if request_params['method'] == 'GET':
-                req = requests_lib.get
-            else:
-                req = requests_lib.post
-                request_args['data'] = request_params['data']
-
-            # ignoring empty urls
-            if not request_params['url']:
-                continue
-
             # append request to list
-            requests.append((req, request_params['url'],
-                             request_args,
-                             selected_engine['name']))
+            requests.append((selected_engine['name'], search_query.query.encode('utf-8'), request_params))
 
-        if not requests:
-            return self.result_container
-        # send all search-request
-        threaded_requests(requests)
-        start_new_thread(gc.collect, tuple())
+            # update timeout_limit
+            timeout_limit = max(timeout_limit, engine.timeout)
+
+        if requests:
+            # send all search requests
+            search_multiple_requests(requests, self.result_container, timeout_limit - (time() - start_time))
+            start_new_thread(gc.collect, tuple())
 
         # return results, suggestions, answers and infoboxes
         return self.result_container
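The refactor splits the old callback machinery into three plain functions: send_http_request does the HTTP round trip and page-load accounting, search_one_request wraps request building, response parsing and engine-time accounting, and search_multiple_requests runs one thread per engine against a shared result container under a single time budget. A minimal sketch of that join-with-remaining-budget pattern, using hypothetical stand-ins (fake_engine_request, run_all) rather than searx's real engines and ResultContainer:

    import threading
    import time

    def fake_engine_request(name, results):
        # stands in for search_one_request(): do the work, store the results
        time.sleep(0.1)
        results[name] = ['result from {0}'.format(name)]

    def run_all(engine_names, timeout_limit):
        start_time = time.time()
        results = {}
        threads = []
        for name in engine_names:
            th = threading.Thread(target=fake_engine_request, args=(name, results))
            th.start()
            threads.append(th)
        for th in threads:
            # each join consumes part of the one shared budget,
            # mirroring search_multiple_requests()
            remaining_time = max(0.0, timeout_limit - (time.time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                print('engine timeout: {0}'.format(th.name))
        return results

    print(run_all(['engine-a', 'engine-b'], timeout_limit=2.0))

Because every thread is started before any join, slow engines overlap and the total wall time is bounded by timeout_limit rather than by the sum of per-engine timeouts.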

searx/webapp.py (+2 / -0)

@@ -601,6 +601,8 @@
             if e.timeout > settings['outgoing']['request_timeout']:
                 stats[e.name]['warn_timeout'] = True
 
+    # get the first element [0] (the engine time tuple),
+    # then its second element [1]: the stats list (element [0] is the label)
     for engine_stat in get_engines_stats()[0][1]:
         stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
         if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
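Since get_engines_stats() now returns the engine-time tuple first, the [0][1] indexing above picks the per-engine time averages. An illustrative sketch of the returned shape (the labels come from the diff above; the numbers are made up):

    engines_stats = [
        ('Engine time (sec)', [{'avg': 0.5, 'name': 'engine-a', 'percentage': 25}]),
        ('Page loads (sec)', [{'avg': 0.3, 'name': 'engine-a', 'percentage': 30}]),
        # ... further (label, stats) tuples
    ]

    # [0] selects the first tuple (engine time), [1] its stats list:
    for engine_stat in engines_stats[0][1]:
        print(engine_stat['name'], round(engine_stat['avg'], 3))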