[enh] reduce the number of outgoing HTTP connections.

Alexandre Flament, 10 years ago
parent commit 78edc16e66

searx/autocomplete.py  (+2 -2)

@@ -111,7 +111,7 @@ def searx_bang(full_query):
 
 
 def dbpedia(query):
-    # dbpedia autocompleter
+    # dbpedia autocompleter, no HTTPS
     autocomplete_url = 'http://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'  # noqa
 
     response = get(autocomplete_url
@@ -139,7 +139,7 @@ def duckduckgo(query):
 
 def google(query):
     # google autocompleter
-    autocomplete_url = 'http://suggestqueries.google.com/complete/search?client=toolbar&'  # noqa
+    autocomplete_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'  # noqa
 
     response = get(autocomplete_url
                    + urlencode(dict(q=query)))

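The only functional change here is the scheme of the Google suggest endpoint; the request is still composed by concatenating the base URL with the urlencoded query. A minimal sketch of the resulting URL, outside the patch, using only the standard library and a made-up query value:

# Illustration only, not part of the commit.
from urllib.parse import urlencode  # Python 3; searx used the Python 2 equivalent at the time

autocomplete_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'
query = 'searx'  # hypothetical query string

print(autocomplete_url + urlencode(dict(q=query)))
# https://suggestqueries.google.com/complete/search?client=toolbar&q=searx
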
searx/engines/dailymotion.py  (+3 -0)

@@ -60,6 +60,9 @@ def response(resp):
         publishedDate = datetime.fromtimestamp(res['created_time'], None)
         embedded = embedded_url.format(videoid=res['id'])
 
+        # http to https
+        thumbnail = thumbnail.replace("http://", "https://")
+
         results.append({'template': 'videos.html',
                         'url': url,
                         'title': title,

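The same inline replace("http://", "https://") rewrite appears again in the digg and google_images engines below. Purely as an illustration, not something the patch introduces, the pattern could be captured in a small helper; unlike str.replace it only touches the URL scheme prefix:

# Sketch only: a hypothetical force_https() helper in the spirit of the
# inline thumbnail.replace("http://", "https://") calls in this commit.
def force_https(url):
    """Rewrite a plain-HTTP URL to HTTPS; leave anything else untouched."""
    if url.startswith("http://"):
        return "https://" + url[len("http://"):]
    return url


print(force_https("http://example.org/thumb.jpg"))   # https://example.org/thumb.jpg
print(force_https("https://example.org/thumb.jpg"))  # unchanged
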
searx/engines/deviantart.py  (+7 -1)

@@ -22,7 +22,7 @@ paging = True
 
 # search-url
 base_url = 'https://www.deviantart.com/'
-search_url = base_url+'search?offset={offset}&{query}'
+search_url = base_url+'browse/all/?offset={offset}&{query}'
 
 
 # do search-request
@@ -56,6 +56,12 @@ def response(resp):
         thumbnail_src = link.xpath('.//img')[0].attrib.get('src')
         img_src = regex.sub('/', thumbnail_src)
 
+        # http to https, remove domain sharding
+        thumbnail_src = re.sub(r"https?://(th|fc)\d+.", "https://th01.", thumbnail_src)
+        thumbnail_src = re.sub(r"http://", "https://", thumbnail_src)
+
+        url = re.sub(r"http://(.*)\.deviantart\.com/", "https://\\1.deviantart.com/", url)
+
         # append result
         results.append({'url': url,
                         'title': title,

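The first re.sub above is what actually cuts down on connections: DeviantArt serves thumbnails from a pool of sharded hosts (th00, th05, fc07, ...), and collapsing them onto a single th01 host lets the client reuse one HTTPS connection instead of opening one per shard. A small sketch of the two substitutions, run on a hypothetical sharded thumbnail URL:

import re

# Hypothetical sharded thumbnail URL, for illustration only.
thumbnail_src = 'http://th05.deviantart.net/images/example-thumb.jpg'

# Same two substitutions as the patch: merge th*/fc* shards onto th01,
# then upgrade any remaining http:// prefix to https://.
thumbnail_src = re.sub(r"https?://(th|fc)\d+.", "https://th01.", thumbnail_src)
thumbnail_src = re.sub(r"http://", "https://", thumbnail_src)

print(thumbnail_src)
# https://th01.deviantart.net/images/example-thumb.jpg
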
searx/engines/digg.py  (+3 -0)

@@ -58,6 +58,9 @@ def response(resp):
         pubdate = result.xpath(pubdate_xpath)[0].attrib.get('datetime')
         publishedDate = parser.parse(pubdate)
 
+        # http to https
+        thumbnail = thumbnail.replace("http://static.digg.com", "https://static.digg.com")
+
         # append result
         results.append({'url': url,
                         'title': title,

searx/engines/gigablast.py  (+1 -1)

@@ -17,7 +17,7 @@ categories = ['general']
 paging = True
 number_of_results = 5
 
-# search-url
+# search-url, invalid HTTPS certificate
 base_url = 'http://gigablast.com/'
 search_string = 'search?{query}&n={number_of_results}&s={offset}&xml=1&qh=0'
 

searx/engines/google_images.py  (+3 -0)

@@ -56,6 +56,9 @@ def response(resp):
             continue
         thumbnail_src = result['tbUrl']
 
+        # http to https
+        thumbnail_src = thumbnail_src.replace("http://", "https://")
+
         # append result
         results.append({'url': href,
                         'title': title,

searx/engines/www1x.py  (+2 -2)

@@ -19,8 +19,8 @@ import re
 categories = ['images']
 paging = False
 
-# search-url
-base_url = 'http://1x.com'
+# search-url, no HTTPS
+base_url = 'https://1x.com'
 search_url = base_url+'/backend/search.php?{query}'
 
 

searx/tests/engines/test_deviantart.py  (+1 -1)

@@ -75,7 +75,7 @@ class TestDeviantartEngine(SearxTestCase):
         self.assertEqual(results[0]['title'], 'Title of image')
         self.assertEqual(results[0]['url'], 'http://url.of.result/2nd.part.of.url')
         self.assertNotIn('content', results[0])
-        self.assertEqual(results[0]['thumbnail_src'], 'http://url.of.thumbnail')
+        self.assertEqual(results[0]['thumbnail_src'], 'https://url.of.thumbnail')
 
         html = """
         <span class="tt-fh-tc" style="width: 202px;">

searx/tests/engines/test_google_images.py  (+1 -1)

@@ -65,7 +65,7 @@ class TestGoogleImagesEngine(SearxTestCase):
         self.assertEqual(len(results), 1)
         self.assertEqual(results[0]['title'], 'This is the title')
         self.assertEqual(results[0]['url'], 'http://this.is.the.url')
-        self.assertEqual(results[0]['thumbnail_src'], 'http://thumbnail.url')
+        self.assertEqual(results[0]['thumbnail_src'], 'https://thumbnail.url')
         self.assertEqual(results[0]['img_src'], 'http://image.url.jpg')
         self.assertEqual(results[0]['content'], '<b>test</b>')
 

searx/tests/engines/test_www1x.py  (+2 -2)

@@ -51,7 +51,7 @@ class TestWww1xEngine(SearxTestCase):
         results = www1x.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 1)
-        self.assertEqual(results[0]['url'], 'http://1x.com/photo/123456')
-        self.assertEqual(results[0]['thumbnail_src'], 'http://1x.com/images/user/testimage-123456.jpg')
+        self.assertEqual(results[0]['url'], 'https://1x.com/photo/123456')
+        self.assertEqual(results[0]['thumbnail_src'], 'https://1x.com/images/user/testimage-123456.jpg')
         self.assertEqual(results[0]['content'], '')
         self.assertEqual(results[0]['template'], 'images.html')