Browse Source

[mod] change settings file structure according to #314

Adam Tauber 9 years ago
parent
commit
1fcf066a81
7 changed files with 46 additions and 34 deletions
  1. searx/autocomplete.py (+1, −1)
  2. searx/engines/__init__.py (+1, −1)
  3. searx/poolrequests.py (+4, −4)
  4. searx/settings.yml (+15, −11)
  5. searx/settings_robot.yml (+12, −5)
  6. searx/utils.py (+2, −1)
  7. searx/webapp.py (+11, −11)

searx/autocomplete.py (+1, −1) — View File

@@ -29,7 +29,7 @@ from searx.poolrequests import get as http_get
29 29
 
30 30
 def get(*args, **kwargs):
31 31
     if 'timeout' not in kwargs:
32
-        kwargs['timeout'] = settings['server']['request_timeout']
32
+        kwargs['timeout'] = settings['outgoing']['request_timeout']
33 33
 
34 34
     return http_get(*args, **kwargs)
35 35
 

searx/engines/__init__.py (+1, −1) — View File

@@ -75,7 +75,7 @@ def load_engine(engine_data):
75 75
         engine.safesearch = False
76 76
 
77 77
     if not hasattr(engine, 'timeout'):
78
-        engine.timeout = settings['server']['request_timeout']
78
+        engine.timeout = settings['outgoing']['request_timeout']
79 79
 
80 80
     if not hasattr(engine, 'shortcut'):
81 81
         engine.shortcut = ''

searx/poolrequests.py (+4, −4) — View File

@@ -39,11 +39,11 @@ class HTTPAdapterWithConnParams(requests.adapters.HTTPAdapter):
39 39
                               block=self._pool_block, **self._conn_params)
40 40
 
41 41
 
42
-if settings.get('source_ips'):
42
+if settings['outgoing'].get('source_ips'):
43 43
     http_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
44
-                          for source_ip in settings['source_ips'])
44
+                          for source_ip in settings['outgoing']['source_ips'])
45 45
     https_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
46
-                           for source_ip in settings['source_ips'])
46
+                           for source_ip in settings['outgoing']['source_ips'])
47 47
 else:
48 48
     http_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))
49 49
     https_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))
@@ -69,7 +69,7 @@ def request(method, url, **kwargs):
69 69
     """same as requests/requests/api.py request(...) except it use SessionSinglePool and force proxies"""
70 70
     global settings
71 71
     session = SessionSinglePool()
72
-    kwargs['proxies'] = settings.get('outgoing_proxies', None)
72
+    kwargs['proxies'] = settings['outgoing'].get('proxies', None)
73 73
     response = session.request(method=method, url=url, **kwargs)
74 74
     session.close()
75 75
     return response

searx/settings.yml (+15, −11) — View File

@@ -1,28 +1,32 @@
1
+general:
2
+    debug : False # Debug mode, only for development
3
+
1 4
 server:
2 5
     port : 8888
3 6
     bind_address : "127.0.0.1" # address to listen on
4 7
     secret_key : "ultrasecretkey" # change this!
5
-    debug : False # Debug mode, only for development
6
-    request_timeout : 2.0 # seconds
7 8
     base_url : False # Set custom base_url. Possible values: False or "https://your.custom.host/location/"
9
+    image_proxy : False # Proxying image results through searx
10
+
11
+ui:
8 12
     themes_path : "" # Custom ui themes path - leave it blank if you didn't change
9 13
     default_theme : oscar # ui theme
10
-    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
11
-    image_proxy : False # Proxying image results through searx
12 14
     default_locale : "" # Default interface locale - leave blank to detect from browser information or use codes from the 'locales' config section
13 15
 
16
+outgoing: # communication with search engines
17
+    request_timeout : 2.0 # seconds
18
+    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
14 19
 # uncomment below section if you want to use a proxy
15 20
 # see http://docs.python-requests.org/en/latest/user/advanced/#proxies
16 21
 # SOCKS proxies are not supported : see https://github.com/kennethreitz/requests/pull/478
17
-#outgoing_proxies :
18
-#    http : http://127.0.0.1:8080
19
-#    https: http://127.0.0.1:8080
20
-
22
+#    proxies :
23
+#        http : http://127.0.0.1:8080
24
+#        https: http://127.0.0.1:8080
21 25
 # uncomment below section only if you have more than one network interface
22 26
 # which can be the source of outgoing search requests
23
-#source_ips:
24
-#  - 1.1.1.1
25
-#  - 1.1.1.2
27
+#    source_ips:
28
+#        - 1.1.1.1
29
+#        - 1.1.1.2
26 30
 
27 31
 engines:
28 32
   - name : wikipedia

searx/settings_robot.yml (+12, −5) — View File

@@ -1,14 +1,21 @@
1
+general:
2
+    debug : False
3
+
1 4
 server:
2 5
     port : 11111
3 6
     bind_address : 127.0.0.1
4 7
     secret_key : "ultrasecretkey" # change this!
5
-    debug : False
6
-    request_timeout : 3.0 # seconds
7
-    base_url: False
8
+    base_url : False
9
+    image_proxy : False
10
+
11
+ui:
8 12
     themes_path : ""
9 13
     default_theme : default
10
-    https_rewrite : True
11
-    image_proxy : False
14
+    default_locale : ""
15
+
16
+outgoing:
17
+    request_timeout : 1.0 # seconds
18
+    useragent_suffix : ""
12 19
 
13 20
 engines:
14 21
   - name : general_dummy

searx/utils.py (+2, −1) — View File

@@ -26,6 +26,7 @@ ua_versions = ('33.0',
26 26
 ua_os = ('Windows NT 6.3; WOW64',
27 27
          'X11; Linux x86_64',
28 28
          'X11; Linux x86')
29
+
29 30
 ua = "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
30 31
 
31 32
 blocked_tags = ('script',
@@ -40,7 +41,7 @@ def gen_useragent():
40 41
 def searx_useragent():
41 42
     return 'searx/{searx_version} {suffix}'.format(
42 43
            searx_version=VERSION_STRING,
43
-           suffix=settings['server'].get('useragent_suffix', ''))
44
+           suffix=settings['outgoing'].get('useragent_suffix', ''))
44 45
 
45 46
 
46 47
 def highlight_content(content, query):

searx/webapp.py (+11, −11) — View File

@@ -77,11 +77,11 @@ except ImportError:
77 77
 
78 78
 
79 79
 static_path, templates_path, themes =\
80
-    get_themes(settings['themes_path']
81
-               if settings.get('themes_path')
80
+    get_themes(settings['ui']['themes_path']
81
+               if settings['ui']['themes_path']
82 82
                else searx_dir)
83 83
 
84
-default_theme = settings['server'].get('default_theme', 'default')
84
+default_theme = settings['ui']['default_theme']
85 85
 
86 86
 static_files = get_static_files(searx_dir)
87 87
 
@@ -121,15 +121,15 @@ _category_names = (gettext('files'),
121 121
                    gettext('news'),
122 122
                    gettext('map'))
123 123
 
124
-outgoing_proxies = settings.get('outgoing_proxies', None)
124
+outgoing_proxies = settings['outgoing'].get('proxies', None)
125 125
 
126 126
 
127 127
 @babel.localeselector
128 128
 def get_locale():
129 129
     locale = request.accept_languages.best_match(settings['locales'].keys())
130 130
 
131
-    if settings['server'].get('default_locale'):
132
-        locale = settings['server']['default_locale']
131
+    if settings['ui'].get('default_locale'):
132
+        locale = settings['ui']['default_locale']
133 133
 
134 134
     if request.cookies.get('locale', '') in settings['locales']:
135 135
         locale = request.cookies.get('locale', '')
@@ -640,12 +640,12 @@ def preferences():
640 640
             stats[e.name] = {'time': None,
641 641
                              'warn_timeout': False,
642 642
                              'warn_time': False}
643
-            if e.timeout > settings['server']['request_timeout']:
643
+            if e.timeout > settings['outgoing']['request_timeout']:
644 644
                 stats[e.name]['warn_timeout'] = True
645 645
 
646 646
     for engine_stat in get_engines_stats()[0][1]:
647 647
         stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
648
-        if engine_stat.get('avg') > settings['server']['request_timeout']:
648
+        if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
649 649
             stats[engine_stat.get('name')]['warn_time'] = True
650 650
     # end of stats
651 651
 
@@ -683,7 +683,7 @@ def image_proxy():
683 683
 
684 684
     resp = requests.get(url,
685 685
                         stream=True,
686
-                        timeout=settings['server'].get('request_timeout', 2),
686
+                        timeout=settings['outgoing']['request_timeout'],
687 687
                         headers=headers,
688 688
                         proxies=outgoing_proxies)
689 689
 
@@ -775,8 +775,8 @@ def clear_cookies():
775 775
 
776 776
 def run():
777 777
     app.run(
778
-        debug=settings['server']['debug'],
779
-        use_debugger=settings['server']['debug'],
778
+        debug=settings['general']['debug'],
779
+        use_debugger=settings['general']['debug'],
780 780
         port=settings['server']['port'],
781 781
         host=settings['server']['bind_address']
782 782
     )