summaryrefslogtreecommitdiff
path: root/searx/search
diff options
context:
space:
mode:
authorAlexandre Flament <alex@al-f.net>2022-07-19 23:40:11 +0200
committerAlexandre FLAMENT <alexandre.flament@hesge.ch>2022-10-14 13:59:22 +0000
commit32e8c2cf098ae59baae5672e70436e47299bec82 (patch)
tree2b70a212b2ccc5e26e77690f8a766f6e573c3c77 /searx/search
parent72f6367e2323b2c57b2bb479dbadccd5b690a986 (diff)
searx.network: add "verify" option to the networks
Each network can define a verify option: * false to disable certificate verification * a path to an existing certificate file. SearXNG uses SSL_CERT_FILE and SSL_CERT_DIR when they are defined; see https://www.python-httpx.org/environment_variables/#ssl_cert_file
Diffstat (limited to 'searx/search')
-rw-r--r--searx/search/processors/online.py18
1 file changed, 14 insertions, 4 deletions
diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py
index be1ca57f..dd5d1e36 100644
--- a/searx/search/processors/online.py
+++ b/searx/search/processors/online.py
@@ -7,6 +7,7 @@
from timeit import default_timer
import asyncio
+import ssl
import httpx
import searx.network
@@ -29,7 +30,6 @@ def default_request_params():
'data': {},
'url': '',
'cookies': {},
- 'verify': True,
'auth': None
# fmt: on
}
@@ -76,9 +76,15 @@ class OnlineProcessor(EngineProcessor):
def _send_http_request(self, params):
# create dictionary which contain all
# information about the request
- request_args = dict(
- headers=params['headers'], cookies=params['cookies'], verify=params['verify'], auth=params['auth']
- )
+ request_args = dict(headers=params['headers'], cookies=params['cookies'], auth=params['auth'])
+
+ # verify
+ # if not None, it overrides the verify value defined in the network.
+ # use False to accept any server certificate
+ # use a path to file to specify a server certificate
+ verify = params.get('verify')
+ if verify is not None:
+ request_args['verify'] = params['verify']
# max_redirects
max_redirects = params.get('max_redirects')
@@ -153,6 +159,10 @@ class OnlineProcessor(EngineProcessor):
# send requests and parse the results
search_results = self._search_basic(query, params)
self.extend_container(result_container, start_time, search_results)
+ except ssl.SSLError as e:
+ # SSL / TLS certificate verification error (e.g. invalid or self-signed certificate)
+ self.handle_exception(result_container, e, suspend=True)
+ self.logger.error("SSLError {}, verify={}".format(e, searx.network.get_network(self.engine_name).verify))
except (httpx.TimeoutException, asyncio.TimeoutError) as e:
# requests timeout (connect or read)
self.handle_exception(result_container, e, suspend=True)