search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

import gc
import threading
from thread import start_new_thread  # Python 2 stdlib module
from time import time
from uuid import uuid4

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines
)
from searx.utils import gen_useragent
from searx.query import Query
from searx.results import ResultContainer
from searx import logger

logger = logger.getChild('search')

number_of_searches = 0

# module-level lock guarding the shared engine statistics (creating a
# fresh threading.RLock() per `with` block would synchronize nothing)
stats_lock = threading.RLock()
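

# run a single engine request; on success, reset the engine's failure
# counters, on any exception record the error and suspend the engine
# for up to one minute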
def search_request_wrapper(fn, url, engine_name, **kwargs):
    ret = None
    engine = engines[engine_name]
    try:
        ret = fn(url, **kwargs)
        with stats_lock:
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
    except Exception:
        # increase error stats
        with stats_lock:
            engine.stats['errors'] += 1
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(60, engine.continuous_errors)

        # print engine name and specific error message
        logger.exception('engine crash: {0}'.format(engine_name))
    return ret
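

# fire every engine request in its own thread, all tagged with a shared
# per-search uuid, then join them against the time budget left from the
# slowest engine's timeout; threads still alive afterwards are logged as
# engine timeouts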
def threaded_requests(requests):
    timeout_limit = max(r[2]['timeout'] for r in requests)
    search_start = time()
    search_id = str(uuid4())
    for fn, url, request_args, engine_name in requests:
        request_args['timeout'] = timeout_limit
        th = threading.Thread(
            target=search_request_wrapper,
            args=(fn, url, engine_name),
            kwargs=request_args,
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - search_start))
            th.join(remaining_time)
            if th.isAlive():
                logger.warning('engine timeout: {0}'.format(th._engine_name))
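

# note: each engine's request() hook mutates the dict returned below in
# place (filling in url, method, data, headers, ...) before the HTTP
# request is sent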
# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
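

# the process_callback produced here is attached as a `response` hook on
# the outgoing request, so it runs in the worker thread as soon as the
# engine's HTTP response arrives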
# create a callback wrapper for the search engine results
def make_callback(engine_name, callback, params, result_container):

    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        # check if redirect comparing to the True value,
        # because resp can be a Mock object, and any attribute name returns something.
        if response.is_redirect is True:
            logger.debug('{0} redirect on: {1}'.format(engine_name, response))
            return

        response.search_params = params

        search_duration = time() - params['started']

        # update stats with current page-load-time
        with stats_lock:
            engines[engine_name].stats['page_load_time'] += search_duration

        timeout_overhead = 0.2  # seconds
        timeout_limit = engines[engine_name].timeout + timeout_overhead
        if search_duration > timeout_limit:
            with stats_lock:
                engines[engine_name].stats['errors'] += 1
            return

        # callback
        search_results = callback(response)

        # add results
        for result in search_results:
            result['engine'] = engine_name

        result_container.extend(engine_name, search_results)

    return process_callback
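

# Search parses the incoming request in __init__ (query, paging,
# language, time range, categories and engine selection) and fires the
# actual engine requests in search()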
class Search(object):

    """Search information container"""

    def __init__(self, request):
        # init vars
        super(Search, self).__init__()
        self.query = None
        self.engines = []
        self.categories = []
        self.paging = False
        self.pageno = 1
        self.lang = 'all'
        self.time_range = None
        self.is_advanced = None

        # set blocked engines
        self.disabled_engines = request.preferences.engines.get_disabled()

        self.result_container = ResultContainer()
        self.request_data = {}

        # set specific language if set
        self.lang = request.preferences.get_value('language')

        # set request method
        if request.method == 'POST':
            self.request_data = request.form
        else:
            self.request_data = request.args

        # TODO better exceptions
        if not self.request_data.get('q'):
            raise Exception('noquery')

        # set page number
        pageno_param = self.request_data.get('pageno', '1')
        if not pageno_param.isdigit() or int(pageno_param) < 1:
            pageno_param = 1

        self.pageno = int(pageno_param)
        # parse query, if tags are set which change
        # the search engine or the search language
        query_obj = Query(self.request_data['q'], self.disabled_engines)
        query_obj.parse_query()

        # set query
        self.query = query_obj.getSearchQuery()

        # get last selected language in query, if possible
        # TODO support search with multiple languages
        if len(query_obj.languages):
            self.lang = query_obj.languages[-1]

        self.time_range = self.request_data.get('time_range')
        self.is_advanced = self.request_data.get('advanced_search')

        self.engines = query_obj.engines

        # if engines are calculated from the query,
        # set categories using that information
        if self.engines and query_obj.specific:
            self.categories = list(set(engine['category']
                                       for engine in self.engines))
        # otherwise, use the defined categories to
        # calculate which engines should be used
        else:
            # set categories/engines
            load_default_categories = True
            for pd_name, pd in self.request_data.items():
                if pd_name == 'categories':
                    self.categories.extend(categ for categ in map(unicode.strip, pd.split(','))
                                           if categ in categories)
                elif pd_name == 'engines':
                    pd_engines = [{'category': engines[engine].categories[0],
                                   'name': engine}
                                  for engine in map(unicode.strip, pd.split(','))
                                  if engine in engines]
                    if pd_engines:
                        self.engines.extend(pd_engines)
                        load_default_categories = False
                elif pd_name.startswith('category_'):
                    category = pd_name[9:]

                    # if the category is not found in the list, skip it
                    if category not in categories:
                        continue

                    if pd != 'off':
                        # add category to list
                        self.categories.append(category)
                    elif category in self.categories:
                        # remove category from list if property is set to 'off'
                        self.categories.remove(category)

            if not load_default_categories:
                if not self.categories:
                    self.categories = list(set(engine['category']
                                               for engine in self.engines))
                return
            # if no category is specified for this search,
            # use the user-defined default configuration
            # (stored in the cookie)
            if not self.categories:
                cookie_categories = request.preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        self.categories.append(ccateg)

            # if still no category is specified, use 'general'
            # as the default category
            if not self.categories:
                self.categories = ['general']

            # use all engines for this search that are
            # declared under the selected categories
            for categ in self.categories:
                self.engines.extend({'category': categ,
                                     'name': engine.name}
                                    for engine in categories[categ]
                                    if (engine.name, categ) not in self.disabled_engines)

        # remove suspended engines
        self.engines = [e for e in self.engines
                        if engines[e['name']].suspend_end_time <= time()]
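
    # search() below builds one HTTP request per selected engine,
    # skipping engines that cannot serve the requested page number,
    # language or time range, and dispatches them all in parallel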
    # do search-request
    def search(self, request):
        global number_of_searches

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # start a search request for all selected engines
        for selected_engine in self.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # if paging is not supported, skip
            if self.pageno > 1 and not engine.paging:
                continue

            # if a search language is set and the engine does not
            # support languages, skip
            if self.lang != 'all' and not engine.language_support:
                continue

            if self.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = time()
            request_params['pageno'] = self.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = request.preferences.get_value('safesearch')
            request_params['time_range'] = self.time_range
            request_params['advanced_search'] = self.is_advanced

            # update request parameters dependent on
            # the search engine (contained in the engines folder)
            engine.request(self.query.encode('utf-8'), request_params)

            if request_params['url'] is None:
                # TODO add support of offline engines
                pass

            # create a callback wrapper for the search engine results
            callback = make_callback(
                selected_engine['name'],
                engine.response,
                request_params,
                self.result_container)
            # create a dictionary which contains all
            # information about the request
            request_args = dict(
                headers=request_params['headers'],
                hooks=dict(response=callback),
                cookies=request_params['cookies'],
                timeout=engine.timeout,
                verify=request_params['verify']
            )

            # specific type of request (GET or POST)
            if request_params['method'] == 'GET':
                req = requests_lib.get
            else:
                req = requests_lib.post
                request_args['data'] = request_params['data']

            # ignore empty urls
            if not request_params['url']:
                continue

            # append request to list
            requests.append((req, request_params['url'],
                             request_args,
                             selected_engine['name']))

        if not requests:
            return self
        # send all search requests
        threaded_requests(requests)

        # run a garbage collection in a separate thread
        start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self
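

# A minimal usage sketch (hypothetical standalone snippet; in searx this
# wiring lives in webapp.py, with a Flask request carrying the user
# preferences):
#
#     search = Search(flask.request)
#     search.search(flask.request)
#     results = search.result_container.get_ordered_results()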