[enh] timeout and total HTTP time are managed by searx.poolrequests

Alexandre Flament, 7 years ago
parent commit c1cfe97851
2 changed files with 79 additions and 42 deletions
  1. searx/poolrequests.py (+49, -2)
  2. searx/search.py (+30, -40)
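
In outline: each engine request runs in its own thread, so instead of passing start_time and timeout_limit through every call, the commit stores them in a threading.local() inside searx.poolrequests, where the request() wrapper applies the timeout and accumulates the time spent in HTTP requests. A minimal standalone sketch of that thread-local pattern (the names thread_local, fetch and do_work are illustrative, not searx code):

import threading
from time import time, sleep

thread_local = threading.local()


def set_timeout_for_thread(timeout, start_time=None):
    # remember the timeout and the moment the user request started
    thread_local.timeout = timeout
    thread_local.start_time = start_time


def reset_time_for_thread():
    # zero the per-thread accumulator before a new engine request
    thread_local.total_time = 0


def get_time_for_thread():
    return thread_local.total_time


def fetch(duration):
    # stand-in for one HTTP request: only measures and accounts the time spent
    before = time()
    sleep(duration)
    thread_local.total_time += time() - before


def do_work(name):
    # what each engine thread does: set the timeout, reset the accumulator,
    # perform its requests, then read back the total HTTP time
    set_timeout_for_thread(2.0, start_time=time())
    reset_time_for_thread()
    fetch(0.1)
    fetch(0.2)
    print('{0}: {1:.2f}s spent in HTTP requests'.format(name, get_time_for_thread()))


threads = [threading.Thread(target=do_work, args=('engine-{0}'.format(i),)) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()

Because the state lives in a threading.local(), the three worker threads above keep independent timeouts and totals without any locking or extra function arguments.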

searx/poolrequests.py (+49, -2)

@@ … @@
 import requests
 
 from itertools import cycle
-from threading import RLock
+from threading import RLock, local
 from searx import settings
+from time import time
 
 
 class HTTPAdapterWithConnParams(requests.adapters.HTTPAdapter):
@@ … @@
                               block=self._pool_block, **self._conn_params)
 
 
+threadLocal = local()
 connect = settings['outgoing'].get('pool_connections', 100)  # Magic number kept from previous code
 maxsize = settings['outgoing'].get('pool_maxsize', requests.adapters.DEFAULT_POOLSIZE)  # Picked from constructor
 if settings['outgoing'].get('source_ips'):
@@ … @@
         super(SessionSinglePool, self).close()
 
 
+def set_timeout_for_thread(timeout, start_time=None):
+    threadLocal.timeout = timeout
+    threadLocal.start_time = start_time
+
+
+def reset_time_for_thread():
+    threadLocal.total_time = 0
+
+
+def get_time_for_thread():
+    return threadLocal.total_time
+
+
 def request(method, url, **kwargs):
-    """same as requests/requests/api.py request(...) except it use SessionSinglePool and force proxies"""
+    """same as requests/requests/api.py request(...)"""
+    time_before_request = time()
+
+    # session start
     session = SessionSinglePool()
+
+    # proxies
     kwargs['proxies'] = settings['outgoing'].get('proxies') or None
+
+    # timeout
+    if 'timeout' in kwargs:
+        timeout = kwargs['timeout']
+    else:
+        timeout = getattr(threadLocal, 'timeout', None)
+        if timeout is not None:
+            kwargs['timeout'] = timeout
+
+    # do request
     response = session.request(method=method, url=url, **kwargs)
+
+    time_after_request = time()
+
+    # is there a timeout for this engine ?
+    if timeout is not None:
+        timeout_overhead = 0.2  # seconds
+        # start_time = when the user request started
+        start_time = getattr(threadLocal, 'start_time', time_before_request)
+        search_duration = time_after_request - start_time
+        if search_duration > timeout + timeout_overhead:
+            raise requests.exceptions.Timeout(response=response)
+
+    # session end
     session.close()
+
+    #
+    threadLocal.total_time += time_after_request - time_before_request
+
     return response
 
 
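The helpers added above are meant to be called once per engine thread before any HTTP request is made; this is the calling pattern that search.py adopts below. A hedged sketch of that usage (run_engine_request and the example URL are illustrative, and the requests_lib import alias is assumed to match how search.py refers to the module):

from time import time

from searx import poolrequests as requests_lib


def run_engine_request(timeout_limit):
    # illustrative caller, not a searx function
    start_time = time()

    # every HTTP request issued from this thread now inherits the timeout,
    # measured against the moment the user request started
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # the per-thread accumulator must be reset before the first request,
    # because request() adds to threadLocal.total_time
    requests_lib.reset_time_for_thread()

    # no explicit timeout argument needed any more
    response = requests_lib.request('GET', 'https://example.org/')

    # total time spent inside HTTP requests, used for the page_load_time stats
    http_time = requests_lib.get_time_for_thread()
    return response, http_time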

searx/search.py (+30, -40)

@@ … @@
 number_of_searches = 0
 
 
-def send_http_request(engine, request_params, start_time, timeout_limit):
-    # for page_load_time stats
-    time_before_request = time()
-
+def send_http_request(engine, request_params):
     # create dictionary which contain all
     # informations about the request
     request_args = dict(
         headers=request_params['headers'],
         cookies=request_params['cookies'],
-        timeout=timeout_limit,
         verify=request_params['verify']
     )
 
@@ … @@
         request_args['data'] = request_params['data']
 
     # send the request
-    response = req(request_params['url'], **request_args)
-
-    # is there a timeout (no parsing in this case)
-    timeout_overhead = 0.2  # seconds
-    time_after_request = time()
-    search_duration = time_after_request - start_time
-    if search_duration > timeout_limit + timeout_overhead:
-        raise requests.exceptions.Timeout(response=response)
-
-    with threading.RLock():
-        # no error : reset the suspend variables
-        engine.continuous_errors = 0
-        engine.suspend_end_time = 0
-        # update stats with current page-load-time
-        # only the HTTP request
-        engine.stats['page_load_time'] += time_after_request - time_before_request
-        engine.stats['page_load_count'] += 1
+    return req(request_params['url'], **request_args)
 
-    # everything is ok : return the response
-    return response
 
-
-def search_one_request(engine, query, request_params, start_time, timeout_limit):
+def search_one_request(engine, query, request_params):
     # update request parameters dependent on
     # search-engine (contained in engines folder)
     engine.request(query, request_params)
@@ … @@
         return []
 
     # send request
-    response = send_http_request(engine, request_params, start_time, timeout_limit)
+    response = send_http_request(engine, request_params)
 
     # parse the response
     response.search_params = request_params
@@ … @@
 
 
 def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+    # set timeout for all HTTP requests
+    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
+    # reset the HTTP total time
+    requests_lib.reset_time_for_thread()
+
+    #
     engine = engines[engine_name]
 
+    # suppose everything will be alright
+    requests_exception = False
+
     try:
         # send requests and parse the results
-        search_results = search_one_request(engine, query, request_params, start_time, timeout_limit)
+        search_results = search_one_request(engine, query, request_params)
 
         # add results
         result_container.extend(engine_name, search_results)
@@ … @@
         with threading.RLock():
             engine.stats['engine_time'] += time() - start_time
             engine.stats['engine_time_count'] += 1
-
-        return True
+            # update stats with the total HTTP time
+            engine.stats['page_load_time'] += requests_lib.get_time_for_thread()
+            engine.stats['page_load_count'] += 1
 
     except Exception as e:
-        engine.stats['errors'] += 1
-
         search_duration = time() - start_time
-        requests_exception = False
+
+        with threading.RLock():
+            engine.stats['errors'] += 1
 
         if (issubclass(e.__class__, requests.exceptions.Timeout)):
             result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
@@ … @@
             # others errors
             logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
 
-        # update continuous_errors / suspend_end_time
+    # suspend or not the engine if there are HTTP errors
+    with threading.RLock():
         if requests_exception:
-            with threading.RLock():
-                engine.continuous_errors += 1
-                engine.suspend_end_time = time() + min(60, engine.continuous_errors)
-
-        #
-        return False
+            # update continuous_errors / suspend_end_time
+            engine.continuous_errors += 1
+            engine.suspend_end_time = time() + min(60, engine.continuous_errors)
+        else:
+            # no HTTP error (perhaps an engine error)
+            # anyway, reset the suspend variables
+            engine.continuous_errors = 0
+            engine.suspend_end_time = 0
 
 
 def search_multiple_requests(requests, result_container, start_time, timeout_limit):
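
The consolidated error handling above also makes the suspension back-off easier to read: each consecutive HTTP failure extends the engine's suspension by one second, capped at one minute, and a run without HTTP errors resets it. A small standalone illustration of that arithmetic (suspend_window is an illustrative name, not searx code):

def suspend_window(continuous_errors):
    # seconds of suspension after `continuous_errors` consecutive HTTP failures:
    # grows by one second per failure, capped at one minute, as in the diff above
    return min(60, continuous_errors)


for errors in (1, 5, 30, 120):
    print('{0} consecutive errors -> suspended for {1}s'.format(errors, suspend_window(errors)))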