Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-23 06:28:23 -05:00)

Merge 75ad850e42 into 2149e32c9e
Commit: d85ff18430
@@ -73,10 +73,12 @@ pub async fn aggregate(
     config: &Config,
     upstream_search_engines: &[EngineHandler],
     safe_search: u8,
+    tcp_connection_keepalive: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let client = CLIENT.get_or_init(|| {
         ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
+            .tcp_keepalive(Duration::from_secs(tcp_connection_keepalive as u64))
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .https_only(true)
             .gzip(true)
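For readers less familiar with reqwest, here is a minimal, self-contained sketch of what the changed client construction does. The OnceLock static and the bare u8 parameters stand in for websurfx's CLIENT static and Config fields, and the gzip option is left out because it needs the crate's gzip feature; this is an illustration of the pattern, not the project's exact code.

use std::sync::OnceLock;
use std::time::Duration;

use reqwest::{Client, ClientBuilder};

static CLIENT: OnceLock<Client> = OnceLock::new();

/// Build (once) a shared HTTP client that uses the keepalive value added above.
fn shared_client(request_timeout: u8, tcp_connection_keepalive: u8) -> &'static Client {
    CLIENT.get_or_init(|| {
        ClientBuilder::new()
            // Overall and connect timeouts, as in the existing code.
            .timeout(Duration::from_secs(request_timeout as u64))
            .connect_timeout(Duration::from_secs(request_timeout as u64))
            // New: keep pooled TCP connections alive for the configured time.
            .tcp_keepalive(Duration::from_secs(tcp_connection_keepalive as u64))
            .https_only(true)
            .build()
            .expect("failed to build the reqwest client")
    })
}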
@@ -216,6 +216,7 @@ async fn results(
                 .filter_map(|engine| EngineHandler::new(engine).ok())
                 .collect::<Vec<EngineHandler>>(),
             safe_search_level,
+            30,
         )
         .await?
     }
@@ -10,6 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
+tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
     time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
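As a rough sketch of how the new Lua option lines up with the Rust side: the field names below are assumptions mirroring the diff, and websurfx's real Config type, which is parsed from config.lua, has many more fields.

use std::time::Duration;

/// Hypothetical, trimmed-down view of the config values touched by this change.
struct Config {
    /// request_timeout from config.lua, in seconds.
    request_timeout: u8,
    /// tcp_connection_keepalive from config.lua, in seconds.
    tcp_connection_keepalive: u8,
}

/// The u8 value read from config.lua becomes the Duration handed to
/// reqwest's ClientBuilder::tcp_keepalive when the shared client is built.
fn keepalive_from(config: &Config) -> Duration {
    Duration::from_secs(config.tcp_connection_keepalive as u64)
}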