From 5d06cce22041ab5e4558fe56db47560b98f28e45 Mon Sep 17 00:00:00 2001
From: ddotthomas
Date: Thu, 9 May 2024 11:24:12 -0600
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat(config):=20option=20to=20keep?=
 =?UTF-8?q?=20the=20`websurfx`=20server=20connection=20alive=20for=20a=20c?=
 =?UTF-8?q?ertain=20period=20for=20subsequent=20requests=20(#568)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 src/config/parser.rs      | 7 +++++--
 src/lib.rs                | 6 +++++-
 src/results/aggregator.rs | 2 +-
 websurfx/config.lua       | 4 +++-
 4 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/src/config/parser.rs b/src/config/parser.rs
index ecf98ee..8bed460 100644
--- a/src/config/parser.rs
+++ b/src/config/parser.rs
@@ -37,13 +37,15 @@ pub struct Config {
     pub request_timeout: u8,
     /// It stores the number of threads which controls the app will use to run.
     pub threads: u8,
+    /// Set the keep-alive time for client connections to the HTTP server
+    pub client_connection_keep_alive: u8,
     /// It stores configuration options for the ratelimiting middleware.
     pub rate_limiter: RateLimiter,
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
     /// It stores the TCP connection keepalive duration in seconds.
-    pub tcp_connection_keepalive: u8,
+    pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
 }
@@ -135,9 +137,10 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
-            tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
+            tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
             threads,
+            client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
                 time_limit: rate_limiter["time_limit"],
diff --git a/src/lib.rs b/src/lib.rs
index 1642d69..b7f0d71 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,7 +14,7 @@ pub mod results;
 pub mod server;
 pub mod templates;
 
-use std::{net::TcpListener, sync::OnceLock};
+use std::{net::TcpListener, sync::OnceLock, time::Duration};
 
 use crate::server::router;
 
@@ -113,6 +113,10 @@ pub fn run(
             .default_service(web::route().to(router::not_found)) // error page
     })
     .workers(config.threads as usize)
+    // Set the keep-alive timer for client connections
+    .keep_alive(Duration::from_secs(
+        config.client_connection_keep_alive as u64,
+    ))
     // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
     .listen(listener)?
     .run();
diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index fe1c87b..f8f1040 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -81,7 +81,7 @@ pub async fn aggregate(
         .pool_idle_timeout(Duration::from_secs(
             config.pool_idle_connection_timeout as u64,
         ))
-        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
+        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
         .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
         .https_only(true)
         .gzip(true)
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 2b81d25..f346c1f 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -10,7 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
+tcp_connection_keep_alive = 30 -- the amount of time the tcp connection should remain alive to the upstream search engines (or connected to the server). (value in seconds).
 pool_idle_connection_timeout = 30 -- timeout for the idle connections in the reqwest HTTP connection pool (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
@@ -18,6 +18,8 @@ rate_limiter = {
 }
 -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
+-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
+client_connection_keep_alive = 120
 
 -- ### Search ###
 -- Filter results based on different levels. The levels provided are:
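
Note (illustrative, not part of the patch): the two settings touched above control different connections. `client_connection_keep_alive` is passed to actix-web's `HttpServer::keep_alive` and governs how long the websurfx server holds idle client connections open for follow-up requests, while `tcp_connection_keep_alive` is passed to reqwest's `ClientBuilder::tcp_keepalive` and applies to the outbound connections to the upstream search engines. The sketch below is a minimal, self-contained example of how these knobs are wired; the `Config` struct and the hard-coded values here are stand-ins for illustration, not the real parser from src/config/parser.rs.

// Minimal sketch (assumed names/values) of the two keep-alive settings:
// one for the actix-web server, one for the reqwest client that talks to
// the upstream search engines.
use std::time::Duration;

use actix_web::{web, App, HttpServer};
use reqwest::Client;

// Stand-in for the fields parsed from websurfx/config.lua.
struct Config {
    client_connection_keep_alive: u8, // client <-> websurfx server
    tcp_connection_keep_alive: u8,    // websurfx server <-> upstream engines
    pool_idle_connection_timeout: u8,
    request_timeout: u8,
}

// Outbound HTTP client used for requests to the upstream search engines.
fn build_upstream_client(config: &Config) -> reqwest::Result<Client> {
    Client::builder()
        .pool_idle_timeout(Duration::from_secs(
            config.pool_idle_connection_timeout as u64,
        ))
        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
        .connect_timeout(Duration::from_secs(config.request_timeout as u64))
        .build()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let config = Config {
        client_connection_keep_alive: 120,
        tcp_connection_keep_alive: 30,
        pool_idle_connection_timeout: 30,
        request_timeout: 30,
    };
    let _client = build_upstream_client(&config).expect("failed to build reqwest client");

    HttpServer::new(|| App::new().route("/", web::get().to(|| async { "ok" })))
        // Keep idle client connections to this server open for the configured time.
        .keep_alive(Duration::from_secs(config.client_connection_keep_alive as u64))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}

With the server-side keep-alive set, a browser that issues several searches in a row can reuse the same TCP connection instead of reconnecting for every request, at the cost of the server holding idle sockets open for up to `client_connection_keep_alive` seconds.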