author     Alexandre Flament <alex@al-f.net>   2021-04-05 10:43:33 +0200
committer  Alexandre Flament <alex@al-f.net>   2021-04-12 17:25:56 +0200
commit     d14994dc73ba5c95382812581dac146d9eceaafa (patch)
tree       2f7720dbae8f1064fe479f986f0b198aff2beb99 /searx/search/processors
parent     eaa694fb7d0e47b943bc6d6edb6cb6a40ab2d85e (diff)
[httpx] replace searx.poolrequests by searx.network
settings.yml:

* outgoing.networks:
  * can contain network definitions
  * properties: enable_http, verify, http2, max_connections, max_keepalive_connections,
    keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
  * retries: 0 by default; number of times searx retries the HTTP request
    (using a different IP & proxy each time)
  * local_addresses can be a range such as "192.168.0.1/24" (IPv6 is supported too),
    see https://github.com/searx/searx/pull/1034
  * support_ipv4 & support_ipv6: both True by default
* each engine can define a "network" section:
  * either a full network definition
  * or a reference to an existing network
* all HTTP requests of an engine use the same HTTP configuration
  (this was not the case before; see the proxy configuration in master)
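As an illustration, a minimal settings.yml sketch of the options listed above. The network name "slow_proxy" and the engine entries are hypothetical; the property names and stated defaults are the ones given in this commit message:

outgoing:
  networks:
    slow_proxy:                        # hypothetical network name
      enable_http: false
      verify: true
      http2: true
      max_connections: 100
      max_keepalive_connections: 10
      keepalive_expiry: 5.0
      local_addresses: 192.168.0.1/24  # a range; IPv6 addresses work too
      support_ipv4: true               # default: true
      support_ipv6: true               # default: true
      proxies: socks5://127.0.0.1:9050
      max_redirects: 30
      retries: 2                       # default 0; each retry uses a different IP & proxy

engines:
  - name: engine one                   # hypothetical engine
    network: slow_proxy                # reference an existing network
  - name: engine two
    network:                           # or a full inline network definition
      http2: false
      retries: 1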
Diffstat (limited to 'searx/search/processors')
-rw-r--r--  searx/search/processors/online.py  |  22 +++++++++-------------
1 file changed, 9 insertions(+), 13 deletions(-)
diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py
index 24d8f53e..66719ea9 100644
--- a/searx/search/processors/online.py
+++ b/searx/search/processors/online.py
@@ -6,7 +6,7 @@ import asyncio
import httpx
-import searx.poolrequests as poolrequests
+import searx.network
from searx.engines import settings
from searx import logger
from searx.utils import gen_useragent
@@ -64,10 +64,6 @@ class OnlineProcessor(EngineProcessor):
auth=params['auth']
)
- # setting engine based proxies
- if hasattr(self.engine, 'proxies'):
- request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)
-
# max_redirects
max_redirects = params.get('max_redirects')
if max_redirects:
@@ -85,9 +81,9 @@ class OnlineProcessor(EngineProcessor):
# specific type of request (GET or POST)
if params['method'] == 'GET':
- req = poolrequests.get
+ req = searx.network.get
else:
- req = poolrequests.post
+ req = searx.network.post
request_args['data'] = params['data']
@@ -128,11 +124,11 @@ class OnlineProcessor(EngineProcessor):
def search(self, query, params, result_container, start_time, timeout_limit):
# set timeout for all HTTP requests
- poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
+ searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
# reset the HTTP total time
- poolrequests.reset_time_for_thread()
- # enable HTTP only if explicitly enabled
- poolrequests.set_enable_http_protocol(self.engine.enable_http)
+ searx.network.reset_time_for_thread()
+ # set the network
+ searx.network.set_context_network_name(self.engine_name)
# suppose everything will be alright
http_exception = False
@@ -149,7 +145,7 @@ class OnlineProcessor(EngineProcessor):
# update engine time when there is no exception
engine_time = time() - start_time
- page_load_time = poolrequests.get_time_for_thread()
+ page_load_time = searx.network.get_time_for_thread()
result_container.add_timing(self.engine_name, engine_time, page_load_time)
with threading.RLock():
self.engine.stats['engine_time'] += engine_time
@@ -162,7 +158,7 @@ class OnlineProcessor(EngineProcessor):
# Timing
engine_time = time() - start_time
- page_load_time = poolrequests.get_time_for_thread()
+ page_load_time = searx.network.get_time_for_thread()
result_container.add_timing(self.engine_name, engine_time, page_load_time)
# Record the errors
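For reference, the per-thread request flow this diff switches online engines to, condensed into a standalone sketch. Only the searx.network functions visible in the diff are used; the wrapper function and its arguments are hypothetical:

from time import time

import searx.network

def fetch_for_engine(engine_name, url, timeout_limit, start_time):
    # set the timeout for all HTTP requests of this search thread
    searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time for the thread
    searx.network.reset_time_for_thread()
    # bind the thread to the engine's network; this replaces the old
    # per-request proxies / enable_http handling from searx.poolrequests
    searx.network.set_context_network_name(engine_name)

    response = searx.network.get(url)  # or searx.network.post(url, data=...)

    engine_time = time() - start_time
    page_load_time = searx.network.get_time_for_thread()
    return response, engine_time, page_load_time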