0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-26 15:58:21 -05:00

Compare commits

...

7 Commits

Author SHA1 Message Date
Kekma
9b090e06a8
Merge f3ce20be6b into 236e8871c6 2024-03-22 04:25:02 +05:30
KekmaTime
f3ce20be6b Fixed import issue in tcp_connection_keepalive 2024-03-22 04:23:00 +05:30
Kekma
4bd8afd70b
Update src/results/aggregator.rs
Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>
2024-03-22 04:05:46 +05:30
Kekma
c7c0bb9e2c
Update src/results/aggregator.rs
Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>
2024-03-22 04:05:29 +05:30
neon_arch
75ad850e42
Merge branch 'rolling' into 526 2024-03-19 21:35:20 +03:00
KekmaTime
f011b05153 Provided a config option under the server section of the config to allow users to keep tcp connections alive for each request for a certain period of time 2024-03-19 19:32:31 +05:30
KekmaTime
4826394454 Added new HTTP connection setting to the reqwest::ClientBuilder to timeout requests for fetching the search results from the upstream search engines. 2024-03-18 20:00:26 +05:30
3 changed files with 5 additions and 0 deletions

View File

@ -42,6 +42,8 @@ pub struct Config {
/// It stores the level of safe search to be used for restricting content in the
/// search results.
pub safe_search: u8,
/// It stores the TCP connection keepalive duration in seconds.
pub tcp_connection_keepalive: u64,
}
impl Config {
@ -131,6 +133,7 @@ impl Config {
upstream_search_engines: globals
.get::<_, HashMap<String, bool>>("upstream_search_engines")?,
request_timeout: globals.get::<_, u8>("request_timeout")?,
tcp_connection_keepalive: globals.get::<_, u64>("tcp_connection_keepalive")?,
threads,
rate_limiter: RateLimiter {
number_of_requests: rate_limiter["number_of_requests"],

View File

@ -77,6 +77,7 @@ pub async fn aggregate(
let client = CLIENT.get_or_init(|| {
ClientBuilder::new()
.timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
.connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add a connect timeout to avoid hanging on unreachable upstream search engines
.https_only(true)
.gzip(true)

View File

@ -10,6 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
tcp_connection_keepalive = 30 -- the amount of time (in seconds) that the TCP connection to the server should be kept alive.
rate_limiter = {
number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.