Mirror of https://github.com/neon-mmd/websurfx.git, synced 2024-11-23 14:38:21 -05:00

Compare commits


No commits in common. "2f4e4038b1a16b6f5775d15333479a106ce6f615" and "33363a83ef9ca65e2a74c003eb036b91af853a51" have entirely different histories.

4 changed files with 5 additions and 14 deletions

View File

@@ -37,15 +37,13 @@ pub struct Config {
     pub request_timeout: u8,
     /// It stores the number of threads which controls the app will use to run.
     pub threads: u8,
-    /// Set the keep-alive time for client connections to the HTTP server
-    pub client_connection_keep_alive: u8,
     /// It stores configuration options for the ratelimiting middleware.
     pub rate_limiter: RateLimiter,
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
     /// It stores the TCP connection keepalive duration in seconds.
-    pub tcp_connection_keep_alive: u8,
+    pub tcp_connection_keepalive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
 }
@@ -137,10 +135,9 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
-            tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
+            tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
             threads,
-            client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
                 time_limit: rate_limiter["time_limit"],
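
The rename in this file has to stay in sync with the key spelled out in config.lua: the string passed to `globals.get` is looked up verbatim in the Lua globals, so a mismatch fails at startup when the config is parsed. A minimal sketch of that failure mode, assuming the mlua crate (whose two-parameter `get::<_, T>` the calls above resemble) and a one-line stand-in for config.lua rather than the real file:

use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    // One-line stand-in for config.lua, defining only the renamed key.
    lua.load("tcp_connection_keepalive = 30").exec()?;
    let globals = lua.globals();

    // Lookup with the matching spelling succeeds.
    let keepalive = globals.get::<_, u8>("tcp_connection_keepalive")?;
    assert_eq!(keepalive, 30);

    // Lookup with the old spelling finds nil and cannot convert it to u8.
    assert!(globals.get::<_, u8>("tcp_connection_keep_alive").is_err());
    Ok(())
}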

View File

@@ -14,7 +14,7 @@ pub mod results;
 pub mod server;
 pub mod templates;
-use std::{net::TcpListener, sync::OnceLock, time::Duration};
+use std::{net::TcpListener, sync::OnceLock};
 use crate::server::router;
@@ -113,10 +113,6 @@ pub fn run(
             .default_service(web::route().to(router::not_found)) // error page
     })
     .workers(config.threads as usize)
-    // Set the keep-alive timer for client connections
-    .keep_alive(Duration::from_secs(
-        config.client_connection_keep_alive as u64,
-    ))
     // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
     .listen(listener)?
     .run();
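
Dropping the `.keep_alive(...)` call (and with it the `Duration` import) means the HTTP server no longer applies the configured client-connection keep-alive and falls back to actix-web's own default. A minimal sketch, assuming actix-web 4.x and a hypothetical `keep_alive_secs` value in place of the removed config field, showing the explicit form the deleted lines used; omitting the marked call reproduces the post-change behaviour:

use std::time::Duration;
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Hypothetical stand-in for the removed client_connection_keep_alive value.
    let keep_alive_secs: u64 = 120;

    HttpServer::new(|| App::new().route("/", web::get().to(HttpResponse::Ok)))
        // Explicit client-connection keep-alive, as the removed lines set it;
        // delete this call to fall back to actix-web's default instead.
        .keep_alive(Duration::from_secs(keep_alive_secs))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}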

View File

@@ -81,7 +81,7 @@ pub async fn aggregate(
         .pool_idle_timeout(Duration::from_secs(
             config.pool_idle_connection_timeout as u64,
         ))
-        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
+        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
         .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
         .https_only(true)
         .gzip(true)
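
Only the field name read from `config` changes here; the reqwest client is built the same way. For reference, a minimal self-contained sketch of that builder, assuming reqwest with the `gzip` feature enabled and a hypothetical `Cfg` struct standing in for the relevant fields of websurfx's `Config`: `tcp_keepalive` enables OS-level TCP keepalive probes on connections to the upstream engines, while `pool_idle_timeout` bounds how long an idle pooled connection is retained.

use std::time::Duration;

// Hypothetical stand-in for the fields of Config used by the builder.
struct Cfg {
    pool_idle_connection_timeout: u8,
    tcp_connection_keepalive: u8,
    request_timeout: u8,
}

fn build_client(cfg: &Cfg) -> reqwest::Result<reqwest::Client> {
    reqwest::Client::builder()
        // How long an idle connection may sit in the pool before it is closed.
        .pool_idle_timeout(Duration::from_secs(cfg.pool_idle_connection_timeout as u64))
        // OS-level TCP keepalive probes on connections to the upstream engines.
        .tcp_keepalive(Duration::from_secs(cfg.tcp_connection_keepalive as u64))
        // Bound connection establishment to avoid hanging on slow upstreams.
        .connect_timeout(Duration::from_secs(cfg.request_timeout as u64))
        .https_only(true)
        .gzip(true) // requires reqwest's `gzip` feature
        .build()
}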

View File

@@ -10,7 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-tcp_connection_keep_alive = 30 -- the amount of time the tcp connection should remain alive to the upstream search engines (or connected to the server). (value in seconds).
+tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
 pool_idle_connection_timeout = 30 -- timeout for the idle connections in the reqwest HTTP connection pool (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
@@ -18,8 +18,6 @@ rate_limiter = {
 }
 -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
--- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
-client_connection_keep_alive = 120
 -- ### Search ###
 -- Filter results based on different levels. The levels provided are: