'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
from imp import load_source
import grequests
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from searx import settings
from searx.utils import gen_useragent
import sys
from datetime import datetime

engine_dir = dirname(realpath(__file__))

number_of_searches = 0

engines = {}

categories = {'general': []}
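
# load_module() imports an engine module (e.g. duckduckgo.py) from this
# directory by filename; any previously imported module of the same name is
# dropped from sys.modules first, so an engine can be re-loaded cleanly.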
def load_module(filename):
    modname = splitext(filename)[0]
    if modname in sys.modules:
        del sys.modules[modname]
    filepath = join(engine_dir, filename)
    module = load_source(modname, filepath)
    module.name = modname
    return module
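
# Engines are loaded and configured at import time: settings.yml must define
# an 'engines' list, each entry naming a module in this directory plus any
# attribute overrides (categories, weight, ...). An attribute left at None
# after configuration is treated as a missing mandatory setting.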
if 'engines' not in settings or not settings['engines']:
    print '[E] Error no engines found. Edit your settings.yml'
    exit(2)

for engine_data in settings['engines']:
    engine_name = engine_data['engine']
    engine = load_module(engine_name + '.py')
    for param_name in engine_data:
        if param_name == 'engine':
            continue
        if param_name == 'categories':
            if engine_data['categories'] == 'none':
                engine.categories = []
            else:
                engine.categories = map(
                    str.strip, engine_data['categories'].split(','))
            continue
        setattr(engine, param_name, engine_data[param_name])
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if getattr(engine, engine_attr) is None:
            print '[E] Engine config error: Missing attribute "{0}.{1}"'.format(engine.name, engine_attr)  # noqa
            sys.exit(1)
    engines[engine.name] = engine
    engine.stats = {
        'result_count': 0,
        'search_count': 0,
        'page_load_time': 0,
        'score_count': 0,
        'errors': 0
    }
    if hasattr(engine, 'categories'):
        for category_name in engine.categories:
            categories.setdefault(category_name, []).append(engine)
    else:
        categories['general'].append(engine)
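
# default_request_params() is the skeleton each engine's request() fills in:
# 'url' is mandatory (empty urls are skipped later), 'data' is only sent for
# POST requests.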
def default_request_params():
    return {
        'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}
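
# make_callback() builds the per-request response hook: the inner function
# closes over the shared results/suggestions containers, times the request
# against params['started'], counts parse errors, and tags every result with
# the name of the engine that produced it.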
def make_callback(engine_name, results, suggestions, callback, params):
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        engines[engine_name].stats['page_load_time'] += \
            (datetime.now() - params['started']).total_seconds()
        try:
            search_results = callback(response)
        except Exception, e:
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(
                engine_name, str(e))
            return
        for result in search_results:
            result['engine'] = engine_name
            if 'suggestion' in result:
                # TODO type checks
                suggestions.add(result['suggestion'])
                continue
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback
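
# score_results() interleaves the per-engine result lists (every engine's
# first hit precedes any engine's second hit) and ranks by position:
#   score = int((flat_len - i) / engines_len) * weight + 1
# e.g. with 3 engines and 9 interleaved results, the result at i=0 scores
# int(9 / 3) * 1.0 + 1 = 4.0 while the last (i=8) scores 1.0. Results with
# the same normalised url are merged: their scores add up, the longer
# 'content' wins and the https variant of the url is preferred.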
def score_results(results):
    flat_res = filter(
        None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)
    results = []
    # deduplication + scoring
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['engines'] = [res['engine']]
        weight = 1.0
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)
        score = int((flat_len - i) / engines_len) * weight + 1
        duplicated = False
        for new_res in results:
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path  # noqa
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path  # noqa
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               p1 == p2 and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break
        if duplicated:
            if len(res.get('content', '')) > len(duplicated.get('content', '')):  # noqa
                duplicated['content'] = res['content']
            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)
    return sorted(results, key=itemgetter('score'), reverse=True)
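
# search() fans the query out to every selected engine: one grequests request
# per engine, fired concurrently by grequests.map(); the response hooks built
# by make_callback() fill the shared results dict, which is then deduplicated
# and ranked by score_results().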
def search(query, request, selected_engines):
    global engines, categories, number_of_searches
    requests = []
    results = {}
    suggestions = set()
    number_of_searches += 1
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = datetime.now()
        request_params = engine.request(query, request_params)

        callback = make_callback(
            selected_engine['name'],
            results,
            suggestions,
            engine.response,
            request_params
        )

        request_args = dict(
            headers=request_params['headers'],
            hooks=dict(response=callback),
            cookies=request_params['cookies'],
            timeout=settings['server']['request_timeout']
        )

        if request_params['method'] == 'GET':
            req = grequests.get
        else:
            req = grequests.post
            request_args['data'] = request_params['data']

        # ignoring empty urls
        if not request_params['url']:
            continue

        requests.append(req(request_params['url'], **request_args))

    grequests.map(requests)

    for engine_name, engine_results in results.items():
        engines[engine_name].stats['search_count'] += 1
        engines[engine_name].stats['result_count'] += len(engine_results)

    results = score_results(results)
    for result in results:
        # credit the score to every engine that returned this result
        for res_engine in result['engines']:
            engines[res_engine].stats['score_count'] += result['score']

    return results, suggestions
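
# get_engines_stats() reduces the raw per-engine counters to per-search
# averages and normalises each metric against its maximum, yielding the
# 0-100 percentages shown on the stats page.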
def get_engines_stats():
    # TODO refactor
    pageloads = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
    for engine in engines.values():
        if engine.stats['search_count'] == 0:
            continue
        results_num = \
            engine.stats['result_count'] / float(engine.stats['search_count'])
        load_times = engine.stats['page_load_time'] / float(engine.stats['search_count'])  # noqa
        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0
        max_results = max(results_num, max_results)
        max_pageload = max(load_times, max_pageload)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])
        pageloads.append({'avg': load_times, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({
            'avg': score_per_result,
            'name': engine.name
        })
    # avoid division by zero when a metric's maximum is still 0
    for engine in pageloads:
        engine['percentage'] = int(engine['avg'] / max_pageload * 100) if max_pageload else 0  # noqa
    for engine in results:
        engine['percentage'] = int(engine['avg'] / max_results * 100) if max_results else 0  # noqa
    for engine in scores:
        engine['percentage'] = int(engine['avg'] / max_score * 100) if max_score else 0  # noqa
    for engine in scores_per_result:
        engine['percentage'] = int(engine['avg'] / max_score_per_result * 100) if max_score_per_result else 0  # noqa
    for engine in errors:
        engine['percentage'] = int(float(engine['avg']) / max_errors * 100) if max_errors else 0  # noqa
    return [
        ('Page loads (sec)', sorted(pageloads, key=itemgetter('avg'))),
        (
            'Number of results',
            sorted(results, key=itemgetter('avg'), reverse=True)
        ),
        ('Scores', sorted(scores, key=itemgetter('avg'), reverse=True)),
        (
            'Scores per result',
            sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
        ),
        ('Errors', sorted(errors, key=itemgetter('avg'), reverse=True)),
    ]
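
# A minimal usage sketch (an assumption about the caller, not part of this
# module): the web frontend is expected to invoke search() with the Flask
# request object and a list of engine selections, e.g. assuming an engine
# named 'duckduckgo' is configured in settings.yml:
#
#   selected = [{'name': 'duckduckgo', 'category': 'general'}]
#   results, suggestions = search('free software', request, selected)
#   for r in results:
#       print r['url'], r['score']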