From 482639445447f7b3b6b6755c51c5fd57e2939db2 Mon Sep 17 00:00:00 2001
From: KekmaTime <22am014@sctce.ac.in>
Date: Mon, 18 Mar 2024 20:00:26 +0530
Subject: [PATCH 1/2] Added a connect-timeout setting to the
 reqwest::ClientBuilder so that requests to the upstream search engines fail
 fast when a connection cannot be established.

---
 src/results/aggregator.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index b46befd..5244a76 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -77,6 +77,7 @@ pub async fn aggregate(
     let client = CLIENT.get_or_init(|| {
         ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
+            .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Fail fast if the connection itself cannot be established in time
             .https_only(true)
             .gzip(true)
             .brotli(true)

From f011b05153712d07521c38f9f1ca9a50b4b0a435 Mon Sep 17 00:00:00 2001
From: KekmaTime <22am014@sctce.ac.in>
Date: Tue, 19 Mar 2024 19:32:31 +0530
Subject: [PATCH 2/2] Provided a config option under the server section of the
 config so that users can keep TCP connections alive for each request for a
 configurable period of time

---
 src/results/aggregator.rs   | 2 ++
 src/server/routes/search.rs | 1 +
 websurfx/config.lua         | 1 +
 3 files changed, 4 insertions(+)

diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index 5244a76..bd66c10 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -73,10 +73,12 @@ pub async fn aggregate(
     config: &Config,
     upstream_search_engines: &[EngineHandler],
     safe_search: u8,
+    tcp_connection_keepalive: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let client = CLIENT.get_or_init(|| {
         ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
+            .tcp_keepalive(Duration::from_secs(tcp_connection_keepalive as u64)) // Send TCP keepalive probes after this much idle time on the socket
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Fail fast if the connection itself cannot be established in time
             .https_only(true)
             .gzip(true)
diff --git a/src/server/routes/search.rs b/src/server/routes/search.rs
index 103b581..c6f3b9d 100644
--- a/src/server/routes/search.rs
+++ b/src/server/routes/search.rs
@@ -216,6 +216,7 @@ async fn results(
                 .filter_map(|engine| EngineHandler::new(engine).ok())
                 .collect::<Vec<EngineHandler>>(),
             safe_search_level,
+            30,
         )
         .await?
     }
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 3b6c4ab..548b4aa 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -10,6 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
+tcp_connection_keepalive = 30 -- the amount of time a TCP connection to the server should be kept alive (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
     time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
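
Taken together, the two patches set three related options on the shared reqwest client that aggregate() builds: an overall request timeout (already present), a connect timeout (patch 1), and a TCP keepalive duration (patch 2). Below is a minimal standalone sketch of the resulting builder chain, not part of the patch series: build_client and its parameters are illustrative stand-ins for the values read from websurfx/config.lua rather than the project's actual Config fields, and as submitted search.rs still passes a literal 30 instead of the new tcp_connection_keepalive config value; the real code also caches the client via CLIENT.get_or_init(...).

// Illustrative sketch of the combined effect of both patches on the reqwest client.
// build_client and its parameters are hypothetical stand-ins for config.lua values.
use std::time::Duration;

use reqwest::{Client, ClientBuilder};

fn build_client(request_timeout: u8, tcp_connection_keepalive: u8) -> reqwest::Result<Client> {
    ClientBuilder::new()
        // Overall per-request timeout (pre-existing setting).
        .timeout(Duration::from_secs(request_timeout as u64))
        // Patch 1: give up if the TCP/TLS connection cannot be established in time.
        .connect_timeout(Duration::from_secs(request_timeout as u64))
        // Patch 2: enable SO_KEEPALIVE probes after this much idle time on the socket.
        .tcp_keepalive(Duration::from_secs(tcp_connection_keepalive as u64))
        .https_only(true)
        .gzip(true)   // requires the reqwest "gzip" feature
        .brotli(true) // requires the reqwest "brotli" feature
        .build()
}

fn main() -> reqwest::Result<()> {
    // Mirrors the defaults in config.lua: request_timeout = 30, tcp_connection_keepalive = 30.
    let _client = build_client(30, 30)?;
    Ok(())
}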