webapp.py

#!/usr/bin/env python

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))
import json
import cStringIO
import os
import hashlib

from datetime import datetime, timedelta
from urllib import urlencode
from werkzeug.contrib.fixers import ProxyFix
from flask import (
    Flask, request, render_template, url_for, Response, make_response,
    redirect, send_from_directory
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
from searx.poolrequests import get as http_get
from searx.engines import (
    categories, engines, get_engines_stats, engine_shortcuts
)
from searx.utils import (
    UnicodeWriter, highlight_content, html_to_text, get_themes,
    get_static_files, get_result_templates, gen_useragent, dict_subset,
    prettify_url, get_blocked_engines
)
from searx.version import VERSION_STRING
from searx.languages import language_codes
from searx.https_rewrite import https_url_rewrite
from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
from searx import logger
try:
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter
except:
    logger.critical("cannot import dependency: pygments")
    from sys import exit
    exit(1)
logger = logger.getChild('webapp')

static_path, templates_path, themes =\
    get_themes(settings['themes_path']
               if settings.get('themes_path')
               else searx_dir)

default_theme = settings['server'].get('default_theme', 'default')

static_files = get_static_files(searx_dir)

result_templates = get_result_templates(searx_dir)

app = Flask(
    __name__,
    static_folder=static_path,
    template_folder=templates_path
)

app.secret_key = settings['server']['secret_key']

babel = Babel(app)
rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he',
               'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']
global_favicons = []
for indice, theme in enumerate(themes):
    global_favicons.append([])
    theme_img_path = searx_dir + "/static/themes/" + theme + "/img/icons/"
    for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
        global_favicons[indice].extend(filenames)

cookie_max_age = 60 * 60 * 24 * 365 * 5  # 5 years

_category_names = (gettext('files'),
                   gettext('general'),
                   gettext('music'),
                   gettext('social media'),
                   gettext('images'),
                   gettext('videos'),
                   gettext('it'),
                   gettext('news'),
                   gettext('map'))
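# Note: _category_names above is not referenced anywhere else in this module;
# wrapping the category labels in gettext() here registers them with babel's
# message extraction so the category names can be translated in the templates.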

@babel.localeselector
def get_locale():
    locale = request.accept_languages.best_match(settings['locales'].keys())

    if settings['server'].get('default_locale'):
        locale = settings['server']['default_locale']

    if request.cookies.get('locale', '') in settings['locales']:
        locale = request.cookies.get('locale', '')

    if 'locale' in request.args\
       and request.args['locale'] in settings['locales']:
        locale = request.args['locale']

    if 'locale' in request.form\
       and request.form['locale'] in settings['locales']:
        locale = request.form['locale']

    return locale

# code-highlighter
@app.template_filter('code_highlighter')
def code_highlighter(codelines, language=None):
    if not language:
        language = 'text'

    try:
        # find lexer by programming language
        lexer = get_lexer_by_name(language, stripall=True)
    except:
        # if the lexer is not found, fall back to the plain-text one
        logger.debug('highlighter cannot find lexer for {0}'.format(language))
        lexer = get_lexer_by_name('text', stripall=True)

    html_code = ''
    tmp_code = ''
    last_line = None

    # parse lines
    for line, code in codelines:
        if not last_line:
            line_code_start = line

        # new codeblock is detected
        if last_line is not None and\
           last_line + 1 != line:

            # highlight last codepart
            formatter = HtmlFormatter(linenos='inline',
                                      linenostart=line_code_start)
            html_code = html_code + highlight(tmp_code, lexer, formatter)

            # reset conditions for next codepart
            tmp_code = ''
            line_code_start = line

        # add codepart
        tmp_code += code + '\n'

        # update line
        last_line = line

    # highlight last codepart
    formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
    html_code = html_code + highlight(tmp_code, lexer, formatter)

    return html_code
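# The filter above is meant to be applied from Jinja templates. Illustrative
# usage only (the variable names are hypothetical; the filter expects an
# iterable of (line_number, code) pairs plus an optional language string):
#   {{ result.codelines|code_highlighter(result.code_language)|safe }}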

def get_base_url():
    if settings['server']['base_url']:
        hostname = settings['server']['base_url']
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname

def get_current_theme_name(override=None):
    """Returns theme name.

    Checks in this order:
    1. override
    2. cookies
    3. settings"""

    if override and override in themes:
        return override
    theme_name = request.args.get('theme',
                                  request.cookies.get('theme',
                                                      default_theme))
    if theme_name not in themes:
        theme_name = default_theme
    return theme_name

def get_result_template(theme, template_name):
    themed_path = theme + '/result_templates/' + template_name
    if themed_path in result_templates:
        return themed_path
    return 'result_templates/' + template_name

def url_for_theme(endpoint, override_theme=None, **values):
    if endpoint == 'static' and values.get('filename'):
        theme_name = get_current_theme_name(override=override_theme)
        filename_with_theme = "themes/{}/{}".format(theme_name, values['filename'])
        if filename_with_theme in static_files:
            values['filename'] = filename_with_theme
    return url_for(endpoint, **values)
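# image_proxify() below signs the original image URL with
# sha256(url + secret_key); the /image_proxy endpoint recomputes that hash and
# rejects any request whose signature does not match, so only URLs generated
# by this instance get proxied.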

def image_proxify(url):

    if url.startswith('//'):
        url = 'https:' + url

    if not settings['server'].get('image_proxy') and not request.cookies.get('image_proxy'):
        return url

    hash_string = url + settings['server']['secret_key']
    h = hashlib.sha256(hash_string.encode('utf-8')).hexdigest()

    return '{0}?{1}'.format(url_for('image_proxy'),
                            urlencode(dict(url=url.encode('utf-8'), h=h)))

def render(template_name, override_theme=None, **kwargs):
    blocked_engines = get_blocked_engines(engines, request.cookies)

    autocomplete = request.cookies.get('autocomplete')
    if autocomplete not in autocomplete_backends:
        autocomplete = None

    nonblocked_categories = set(category for engine_name in engines
                                for category in engines[engine_name].categories
                                if (engine_name, category) not in blocked_engines)

    if 'categories' not in kwargs:
        kwargs['categories'] = ['general']
        kwargs['categories'].extend(x for x in
                                    sorted(categories.keys())
                                    if x != 'general'
                                    and x in nonblocked_categories)

    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        for arg in request.args:
            if arg.startswith('category_'):
                c = arg.split('_', 1)[1]
                if c in categories:
                    kwargs['selected_categories'].append(c)

        if not kwargs['selected_categories']:
            cookie_categories = request.cookies.get('categories', '').split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    kwargs['selected_categories'].append(ccateg)

        if not kwargs['selected_categories']:
            kwargs['selected_categories'] = ['general']

    if 'autocomplete' not in kwargs:
        kwargs['autocomplete'] = autocomplete

    if get_locale() in rtl_locales and 'rtl' not in kwargs:
        kwargs['rtl'] = True

    kwargs['searx_version'] = VERSION_STRING

    kwargs['method'] = request.cookies.get('method', 'POST')

    kwargs['safesearch'] = request.cookies.get('safesearch', '1')

    # override url_for function in templates
    kwargs['url_for'] = url_for_theme

    kwargs['image_proxify'] = image_proxify

    kwargs['get_result_template'] = get_result_template

    kwargs['theme'] = get_current_theme_name(override=override_theme)

    kwargs['template_name'] = template_name

    kwargs['cookies'] = request.cookies

    return render_template(
        '{}/{}'.format(kwargs['theme'], template_name), **kwargs)

@app.route('/search', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    search.results, search.suggestions,\
        search.answers, search.infoboxes = search.search(request)

    for result in search.results:

        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        # check if HTTPS rewrite is required
        if settings['server']['https_rewrite']\
           and result['parsed_url'].scheme == 'http':

            result = https_url_rewrite(result)

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        result['pretty_url'] = prettify_url(result['url'])

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
            if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions,
        answers=search.answers,
        infoboxes=search.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())]
    )
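# Illustrative query against a local instance (host and port depend on your
# settings.yml; 8888 is the port used in searx's example configuration):
#   curl 'http://127.0.0.1:8888/?q=searx&format=json'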

@app.route('/about', methods=['GET'])
def about():
    """Render about page"""
    return render(
        'about.html',
        rtl=False,
    )

@app.route('/autocompleter', methods=['GET', 'POST'])
def autocompleter():
    """Return autocompleter results"""
    request_data = {}

    # select request method
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    # set blocked engines
    blocked_engines = get_blocked_engines(engines, request.cookies)

    # parse query
    query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
    query.parse_query()

    # check if search query is set
    if not query.getSearchQuery():
        return '', 400

    # run autocompleter
    completer = autocomplete_backends.get(request.cookies.get('autocomplete'))

    # parse searx specific autocompleter results like !bang
    raw_results = searx_bang(query)

    # normal autocompletion results only appear if at most 3 inner results were returned
    if len(raw_results) <= 3 and completer:
        # run autocompletion
        raw_results.extend(completer(query.getSearchQuery()))

    # parse results (write :language and !engine back to result string)
    results = []
    for result in raw_results:
        query.changeSearchQuery(result)

        # add parsed result
        results.append(query.getFullQuery())

    # return autocompleter results
    if request_data.get('format') == 'x-suggestions':
        return Response(json.dumps([query.query, results]),
                        mimetype='application/json')

    return Response(json.dumps(results),
                    mimetype='application/json')

@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None
    image_proxy = request.cookies.get('image_proxy', settings['server'].get('image_proxy'))

    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    resp = make_response(redirect(url_for('index')))

    if request.method == 'GET':
        blocked_engines = get_blocked_engines(engines, request.cookies)
    else:  # on save
        selected_categories = []
        locale = None
        autocomplete = ''
        method = 'POST'
        safesearch = '1'
        # fall back to the default theme if the form does not submit one
        theme = default_theme

        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'image_proxy':
                image_proxy = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name == 'method':
                method = pd
            elif pd_name == 'safesearch':
                safesearch = pd
            elif pd_name.startswith('engine_'):
                if pd_name.find('__') > -1:
                    engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
                    if engine_name in engines and category in engines[engine_name].categories:
                        blocked_engines.append((engine_name, category))
            elif pd_name == 'theme':
                theme = pd if pd in themes else default_theme
            else:
                resp.set_cookie(pd_name, pd, max_age=cookie_max_age)

        resp.set_cookie(
            'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
            max_age=cookie_max_age
        )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )

        resp.set_cookie('method', method, max_age=cookie_max_age)

        resp.set_cookie('safesearch', safesearch, max_age=cookie_max_age)

        resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)

        resp.set_cookie('theme', theme, max_age=cookie_max_age)

        return resp

    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  image_proxy=image_proxy,
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()},
                  themes=themes,
                  theme=get_current_theme_name())
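# /image_proxy below fetches a remote image on behalf of the client: it only
# accepts URLs whose h parameter matches sha256(url + secret_key) as produced
# by image_proxify() above, only passes through image/* responses, and bails
# out with 502 once more than roughly 5 MB have been read.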

@app.route('/image_proxy', methods=['GET'])
def image_proxy():
    url = request.args.get('url', '').encode('utf-8')

    if not url:
        return '', 400

    h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()
    if h != request.args.get('h'):
        return '', 400

    headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
    headers['User-Agent'] = gen_useragent()

    resp = http_get(url,
                    stream=True,
                    timeout=settings['server'].get('request_timeout', 2),
                    headers=headers)

    if resp.status_code == 304:
        return '', resp.status_code

    if resp.status_code != 200:
        logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
        if resp.status_code >= 400:
            return '', resp.status_code
        return '', 400

    if not resp.headers.get('content-type', '').startswith('image/'):
        logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))
        return '', 400

    img = ''
    chunk_counter = 0

    for chunk in resp.iter_content(1024 * 1024):
        chunk_counter += 1
        if chunk_counter > 5:
            return '', 502  # Bad gateway - file is too big (>5M)
        img += chunk

    headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})

    return Response(img, mimetype=resp.headers['content-type'], headers=headers)

@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    stats = get_engines_stats()
    return render(
        'stats.html',
        stats=stats,
    )


@app.route('/robots.txt', methods=['GET'])
def robots():
    return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /preferences
""", mimetype='text/plain')

@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    method = 'post'

    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'

    ret = render('opensearch.xml',
                 opensearch_method=method,
                 host=get_base_url())

    resp = Response(response=ret,
                    status=200,
                    mimetype="text/xml")
    return resp

@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path,
                                            'static/themes',
                                            get_current_theme_name(),
                                            'img'),
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')

def run():
    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )


application = app
app.wsgi_app = ProxyFix(application.wsgi_app)


if __name__ == "__main__":
    run()