0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-22 14:08:23 -05:00

Compare commits

..

1 Commits

Author SHA1 Message Date
Łukasz Mariański
f6373bcd83
Merge 99fe3da258 into 193b4e36db 2024-09-15 21:54:44 +05:30
10 changed files with 18 additions and 25 deletions

6
src/cache/cacher.rs vendored
View File

@ -546,7 +546,7 @@ impl SharedCache {
/// # Arguments /// # Arguments
/// ///
/// * `url` - It takes the search url as an argument which will be used as the key to fetch the /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
/// cached results from the cache. /// cached results from the cache.
/// ///
/// # Error /// # Error
/// ///
@ -563,9 +563,9 @@ impl SharedCache {
/// # Arguments /// # Arguments
/// ///
/// * `search_results` - It takes the `SearchResults` as an argument which are results that /// * `search_results` - It takes the `SearchResults` as an argument which are results that
/// need to be cached. /// need to be cached.
/// * `url` - It takes the search url as an argument which will be used as the key for storing /// * `url` - It takes the search url as an argument which will be used as the key for storing
/// results in the cache. /// results in the cache.
/// ///
/// # Error /// # Error
/// ///

View File

@ -49,10 +49,9 @@ pub struct Config {
pub tcp_connection_keep_alive: u8, pub tcp_connection_keep_alive: u8,
/// It stores the pool idle connection timeout in seconds. /// It stores the pool idle connection timeout in seconds.
pub pool_idle_connection_timeout: u8, pub pool_idle_connection_timeout: u8,
/// Url of the proxy to use for outgoing requests. /// Url of the proxy to use for outgoing requests.
pub proxy: Option<Proxy>, pub proxy: Option<Proxy>,
/// It stores the number of https connections to keep in the pool.
pub number_of_https_connections: u8,
} }
impl Config { impl Config {
@ -62,7 +61,7 @@ impl Config {
/// # Arguments /// # Arguments
/// ///
/// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
/// initialized twice. Pass false if the logger has not yet been initialized. /// initialized twice. Pass false if the logger has not yet been initialized.
/// ///
/// # Error /// # Error
/// ///
@ -153,7 +152,6 @@ impl Config {
request_timeout: globals.get::<_, u8>("request_timeout")?, request_timeout: globals.get::<_, u8>("request_timeout")?,
tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?, tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?, pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
number_of_https_connections: globals.get::<_, u8>("number_of_https_connections")?,
threads, threads,
client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?, client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
rate_limiter: RateLimiter { rate_limiter: RateLimiter {

View File

@ -11,9 +11,7 @@ use thesaurus::synonyms;
/// A named struct to store the raw scraped search results scraped search results from the /// A named struct to store the raw scraped search results scraped search results from the
/// upstream search engines before aggregating it. It derives the Clone trait which is needed /// upstream search engines before aggregating it. It derives the Clone trait which is needed
/// to write idiomatic rust using `Iterators`. /// to write idiomatic rust using `Iterators`.
/// /// (href url in html in simple words).
/// (href url in html in simple words).
///
#[derive(Clone, Serialize, Deserialize)] #[derive(Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SearchResult { pub struct SearchResult {
@ -36,7 +34,7 @@ impl SearchResult {
/// ///
/// * `title` - The title of the search result. /// * `title` - The title of the search result.
/// * `url` - The url which is accessed when clicked on it /// * `url` - The url which is accessed when clicked on it
/// (href url in html in simple words). /// (href url in html in simple words).
/// * `description` - The description of the search result. /// * `description` - The description of the search result.
/// * `engine` - The names of the upstream engines from which these results were provided. /// * `engine` - The names of the upstream engines from which these results were provided.
pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self { pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {
@ -126,7 +124,7 @@ impl EngineErrorInfo {
/// # Arguments /// # Arguments
/// ///
/// * `error` - It takes the error type which occurred while fetching the result from a particular /// * `error` - It takes the error type which occurred while fetching the result from a particular
/// search engine. /// search engine.
/// * `engine` - It takes the name of the engine that failed to provide the requested search results. /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
pub fn new(error: &EngineError, engine: &str) -> Self { pub fn new(error: &EngineError, engine: &str) -> Self {
Self { Self {
@ -179,11 +177,11 @@ impl SearchResults {
/// # Arguments /// # Arguments
/// ///
/// * `results` - Takes an argument of individual serializable `SearchResult` struct /// * `results` - Takes an argument of individual serializable `SearchResult` struct
/// and stores it into a vector of `SearchResult` structs. /// and stores it into a vector of `SearchResult` structs.
/// * `page_query` - Takes an argument of current page's search query `q` provided in /// * `page_query` - Takes an argument of current page's search query `q` provided in
/// the search url. /// the search url.
/// * `engine_errors_info` - Takes an array of structs which contains information regarding /// * `engine_errors_info` - Takes an array of structs which contains information regarding
/// which engines failed with their names, reason and their severity color name. /// which engines failed with their names, reason and their severity color name.
pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self { pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
Self { Self {
results, results,

View File

@ -29,7 +29,7 @@ impl Style {
/// ///
/// * `theme` - It takes the parsed theme option used to set a theme for the website. /// * `theme` - It takes the parsed theme option used to set a theme for the website.
/// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme /// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
/// for the theme being used. /// for the theme being used.
pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self { pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
Style { Style {
theme, theme,

View File

@ -60,7 +60,7 @@ type FutureVec =
/// * `debug` - Accepts a boolean value to enable or disable debug mode option. /// * `debug` - Accepts a boolean value to enable or disable debug mode option.
/// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the /// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the
/// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout. /// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
/// user through the UI or the config file. /// user through the UI or the config file.
/// ///
/// # Error /// # Error
/// ///
@ -81,7 +81,6 @@ pub async fn aggregate(
config.pool_idle_connection_timeout as u64, config.pool_idle_connection_timeout as u64,
)) ))
.tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64)) .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
.pool_max_idle_per_host(config.number_of_https_connections as usize)
.connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.https_only(true) .https_only(true)
.gzip(true) .gzip(true)

View File

@ -146,7 +146,7 @@ pub async fn search(
/// # Arguments /// # Arguments
/// ///
/// * `url` - It takes the url of the current page that requested the search results for a /// * `url` - It takes the url of the current page that requested the search results for a
/// particular search query. /// particular search query.
/// * `config` - It takes a parsed config struct. /// * `config` - It takes a parsed config struct.
/// * `query` - It takes the page number as u32 value. /// * `query` - It takes the page number as u32 value.
/// * `req` - It takes the `HttpRequest` struct as a value. /// * `req` - It takes the `HttpRequest` struct as a value.

View File

@ -12,7 +12,7 @@ const SAFE_SEARCH_LEVELS_NAME: [&str; 3] = ["None", "Low", "Moderate"];
/// # Arguments /// # Arguments
/// ///
/// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream /// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream
/// search engine which failed to provide results as an argument. /// search engine which failed to provide results as an argument.
/// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument. /// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument.
/// * `query` - It takes the current search query provided by user as an argument. /// * `query` - It takes the current search query provided by user as an argument.
/// ///

View File

@ -9,7 +9,7 @@ use maud::{html, Markup};
/// # Arguments /// # Arguments
/// ///
/// * `engine_names` - It takes the key value pair list of all available engine names and their corresponding /// * `engine_names` - It takes the key value pair list of all available engine names and their corresponding
/// selected (enabled/disabled) value as an argument. /// selected (enabled/disabled) value as an argument.
/// ///
/// # Returns /// # Returns
/// ///

View File

@ -11,9 +11,9 @@ use std::fs::read_dir;
/// # Arguments /// # Arguments
/// ///
/// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an /// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
/// argument. /// argument.
/// * `selected_style` - It takes the currently selected style value provided via the config file /// * `selected_style` - It takes the currently selected style value provided via the config file
/// as an argument. /// as an argument.
/// ///
/// # Error /// # Error
/// ///

View File

@ -18,8 +18,6 @@ rate_limiter = {
} }
-- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
https_adaptive_window_size = false https_adaptive_window_size = false
number_of_https_connections = 10 -- the number of https connections that should be available in the connection pool.
-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines -- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
client_connection_keep_alive = 120 client_connection_keep_alive = 120