From 75536166495b2ddd453569cfc258502a1c00c578 Mon Sep 17 00:00:00 2001
From: KekmaTime <22am014@sctce.ac.in>
Date: Sun, 31 Mar 2024 17:59:24 +0530
Subject: [PATCH 1/2] 🔧 Config: Updated `number_of_https_connections` for the
 HTTPS connection pool.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 websurfx/config.lua | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/websurfx/config.lua b/websurfx/config.lua
index 2b81d25..988b3dc 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -16,9 +16,8 @@ rate_limiter = {
 	number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
 	time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
 }
--- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
-
+number_of_https_connections = 10 -- The number of HTTPS connections that should be available in the connection pool.
 -- ### Search ###
 -- Filter results based on different levels. The levels provided are:
 -- {{

From b7d0ef7252288d2bc5cdd189f534af375f28b3f5 Mon Sep 17 00:00:00 2001
From: KekmaTime <22am014@sctce.ac.in>
Date: Thu, 12 Sep 2024 21:58:49 +0530
Subject: [PATCH 2/2] (feat): added the max idle pool per host option

---
 src/cache/cacher.rs                             |  6 +++---
 src/config/parser.rs                            |  5 ++++-
 src/models/aggregation_models.rs                | 14 ++++++++------
 src/models/parser_models.rs                     |  2 +-
 src/results/aggregator.rs                       |  3 ++-
 src/server/routes/search.rs                     |  2 +-
 src/templates/partials/search_bar.rs            |  2 +-
 src/templates/partials/settings_tabs/engines.rs |  2 +-
 .../partials/settings_tabs/user_interface.rs    |  4 ++--
 9 files changed, 23 insertions(+), 17 deletions(-)

diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs
index 604d527..5eac473 100644
--- a/src/cache/cacher.rs
+++ b/src/cache/cacher.rs
@@ -546,7 +546,7 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
-    /// cached results from the cache.
+    ///   cached results from the cache.
     ///
     /// # Error
     ///
@@ -563,9 +563,9 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `search_results` - It takes the `SearchResults` as an argument which are results that
-    /// needs to be cached.
+    ///   needs to be cached.
     /// * `url` - It takes the search url as an argument which will be used as the key for storing
-    /// results in the cache.
+    ///   results in the cache.
     ///
     /// # Error
     ///
diff --git a/src/config/parser.rs b/src/config/parser.rs
index 8bed460..5ff9444 100644
--- a/src/config/parser.rs
+++ b/src/config/parser.rs
@@ -48,6 +48,8 @@ pub struct Config {
     pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
    pub pool_idle_connection_timeout: u8,
+    /// It stores the number of HTTPS connections to keep in the pool.
+    pub number_of_https_connections: u8,
 }
 
 impl Config {
@@ -57,7 +59,7 @@ impl Config {
     /// # Arguments
     ///
     /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
-    /// initialized twice. Pass false if the logger has not yet been initialized.
+    ///   initialized twice. Pass false if the logger has not yet been initialized.
     ///
     /// # Error
     ///
@@ -139,6 +141,7 @@ impl Config {
             request_timeout: globals.get::<_, u8>("request_timeout")?,
             tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
+            number_of_https_connections: globals.get::<_, u8>("number_of_https_connections")?,
             threads,
             client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
diff --git a/src/models/aggregation_models.rs b/src/models/aggregation_models.rs
index 01c67a6..53aa25b 100644
--- a/src/models/aggregation_models.rs
+++ b/src/models/aggregation_models.rs
@@ -11,7 +11,9 @@ use thesaurus::synonyms;
 /// A named struct to store the raw scraped search results scraped search results from the
 /// upstream search engines before aggregating it.It derives the Clone trait which is needed
 /// to write idiomatic rust using `Iterators`.
-/// (href url in html in simple words).
+///
+/// (href url in html in simple words).
+///
 #[derive(Clone, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
@@ -34,7 +36,7 @@ impl SearchResult {
     ///
     /// * `title` - The title of the search result.
     /// * `url` - The url which is accessed when clicked on it
-    /// (href url in html in simple words).
+    ///   (href url in html in simple words).
     /// * `description` - The description of the search result.
     /// * `engine` - The names of the upstream engines from which this results were provided.
     pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {
@@ -124,7 +126,7 @@ impl EngineErrorInfo {
     /// # Arguments
     ///
     /// * `error` - It takes the error type which occured while fetching the result from a particular
-    /// search engine.
+    ///   search engine.
     /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
     pub fn new(error: &EngineError, engine: &str) -> Self {
         Self {
@@ -177,11 +179,11 @@ impl SearchResults {
     /// # Arguments
     ///
     /// * `results` - Takes an argument of individual serializable `SearchResult` struct
-    /// and stores it into a vector of `SearchResult` structs.
+    ///   and stores it into a vector of `SearchResult` structs.
     /// * `page_query` - Takes an argument of current page`s search query `q` provided in
-    /// the search url.
+    ///   the search url.
     /// * `engine_errors_info` - Takes an array of structs which contains information regarding
-    /// which engines failed with their names, reason and their severity color name.
+    ///   which engines failed with their names, reason and their severity color name.
     pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
         Self {
             results,
diff --git a/src/models/parser_models.rs b/src/models/parser_models.rs
index 24b3fd8..5f578a5 100644
--- a/src/models/parser_models.rs
+++ b/src/models/parser_models.rs
@@ -29,7 +29,7 @@ impl Style {
     ///
     /// * `theme` - It takes the parsed theme option used to set a theme for the website.
     /// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
-    /// for the theme being used.
+    ///   for the theme being used.
     pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
         Style {
             theme,
diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index 009111b..65d5a5e 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -60,7 +60,7 @@ type FutureVec =
 /// * `debug` - Accepts a boolean value to enable or disable debug mode option.
 /// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the
 /// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
-/// user through the UI or the config file.
+///   user through the UI or the config file.
 ///
 /// # Error
 ///
@@ -81,6 +81,7 @@ pub async fn aggregate(
             config.pool_idle_connection_timeout as u64,
         ))
         .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
+        .pool_max_idle_per_host(config.number_of_https_connections as usize)
         .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
         .https_only(true)
         .gzip(true)
diff --git a/src/server/routes/search.rs b/src/server/routes/search.rs
index d43ddec..9fa6877 100644
--- a/src/server/routes/search.rs
+++ b/src/server/routes/search.rs
@@ -146,7 +146,7 @@ pub async fn search(
 /// # Arguments
 ///
 /// * `url` - It takes the url of the current page that requested the search results for a
-/// particular search query.
+///   particular search query.
 /// * `config` - It takes a parsed config struct.
 /// * `query` - It takes the page number as u32 value.
 /// * `req` - It takes the `HttpRequest` struct as a value.
diff --git a/src/templates/partials/search_bar.rs b/src/templates/partials/search_bar.rs
index c40244c..9c6ea9c 100644
--- a/src/templates/partials/search_bar.rs
+++ b/src/templates/partials/search_bar.rs
@@ -12,7 +12,7 @@ const SAFE_SEARCH_LEVELS_NAME: [&str; 3] = ["None", "Low", "Moderate"];
 /// # Arguments
 ///
 /// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream
-/// search engine which failed to provide results as an argument.
+///   search engine which failed to provide results as an argument.
 /// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument.
 /// * `query` - It takes the current search query provided by user as an argument.
 ///
diff --git a/src/templates/partials/settings_tabs/engines.rs b/src/templates/partials/settings_tabs/engines.rs
index de67ac7..ad67f80 100644
--- a/src/templates/partials/settings_tabs/engines.rs
+++ b/src/templates/partials/settings_tabs/engines.rs
@@ -9,7 +9,7 @@ use maud::{html, Markup};
 /// # Arguments
 ///
 /// * `engine_names` - It takes the key value pair list of all available engine names and there corresponding
-/// selected (enabled/disabled) value as an argument.
+///   selected (enabled/disabled) value as an argument.
 ///
 /// # Returns
 ///
diff --git a/src/templates/partials/settings_tabs/user_interface.rs b/src/templates/partials/settings_tabs/user_interface.rs
index 8f685be..87b0fb3 100644
--- a/src/templates/partials/settings_tabs/user_interface.rs
+++ b/src/templates/partials/settings_tabs/user_interface.rs
@@ -11,9 +11,9 @@ use std::fs::read_dir;
 /// # Arguments
 ///
 /// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
-/// argument.
+///   argument.
 /// * `selected_style` - It takes the currently selected style value provided via the config file
-/// as an argument.
+///   as an argument.
 ///
 /// # Error
 ///
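
Note for reviewers: taken together, the two patches thread a single new
setting, `number_of_https_connections`, from `websurfx/config.lua` through
the `Config` struct in `src/config/parser.rs` and into the reqwest
`ClientBuilder` in `src/results/aggregator.rs`. The snippet below is a
minimal standalone sketch of that wiring, not code from the patches: only
`number_of_https_connections = 10` comes from the config above, the three
timeout values are hypothetical placeholders, and it assumes the `reqwest`
crate with its default features.

use std::time::Duration;

use reqwest::{Client, ClientBuilder};

/// Builds an HTTPS client the way `aggregate` does after patch 2/2,
/// with the pool cap applied via `pool_max_idle_per_host`.
fn build_client() -> reqwest::Result<Client> {
    let number_of_https_connections: u8 = 10; // from config.lua above
    let pool_idle_connection_timeout: u8 = 30; // placeholder value (secs)
    let tcp_connection_keep_alive: u8 = 30; // placeholder value (secs)
    let request_timeout: u8 = 30; // placeholder value (secs)

    ClientBuilder::new()
        .pool_idle_timeout(Duration::from_secs(pool_idle_connection_timeout as u64))
        .tcp_keepalive(Duration::from_secs(tcp_connection_keep_alive as u64))
        // The new option: keep at most this many idle connections per host
        // in reqwest's connection pool; idle connections beyond the cap
        // are dropped rather than kept alive.
        .pool_max_idle_per_host(number_of_https_connections as usize)
        .connect_timeout(Duration::from_secs(request_timeout as u64))
        .https_only(true)
        .build()
}

fn main() {
    let client = build_client().expect("client should build");
    // The client is cheap to clone and share; requests to the same host
    // will reuse up to 10 pooled idle HTTPS connections.
    let _ = client;
}

Storing the cap as a `u8` matches the other pool-related options already in
`Config` and bounds the value to 0-255, which is ample for a per-host idle
connection pool.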