diff --git a/src/config_parser/parser.rs b/src/config_parser/parser.rs
index 4625bd8..21a9bf5 100644
--- a/src/config_parser/parser.rs
+++ b/src/config_parser/parser.rs
@@ -20,6 +20,14 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
+    pub aggregator: AggregatorConfig,
+}
+
+/// Configuration options for the aggregator.
+#[derive(Clone)]
+pub struct AggregatorConfig {
+    /// Whether to introduce a random delay before sending the request to the search engine.
+    pub random_delay: bool,
 }
 
 impl Config {
@@ -41,6 +49,8 @@ impl Config {
                 .load(&fs::read_to_string("./websurfx/config.lua")?)
                 .exec()?;
 
+            let aggregator_config = globals.get::<_, rlua::Table>("aggregator")?;
+
             Ok(Config {
                 port: globals.get::<_, u16>("port")?,
                 binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -49,6 +59,9 @@
                     globals.get::<_, String>("colorscheme")?,
                 ),
                 redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+                aggregator: AggregatorConfig {
+                    random_delay: aggregator_config.get::<_, bool>("random_delay")?,
+                },
             })
         })
     }
diff --git a/src/search_results_handler/aggregator.rs b/src/search_results_handler/aggregator.rs
index 5133094..8b86972 100644
--- a/src/search_results_handler/aggregator.rs
+++ b/src/search_results_handler/aggregator.rs
@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
 ///
 /// * `query` - Accepts a string to query with the above upstream search engines.
 /// * `page` - Accepts an u32 page number.
+/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
 ///
 /// # Error
 ///
@@ -38,14 +39,17 @@
 pub async fn aggregate(
     query: &str,
     page: u32,
+    random_delay: bool,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
 
     // Add a random delay before making the request.
-    let mut rng = rand::thread_rng();
-    let delay_secs = rng.gen_range(1..10);
-    std::thread::sleep(Duration::from_secs(delay_secs));
+    if random_delay {
+        let mut rng = rand::thread_rng();
+        let delay_secs = rng.gen_range(1..10);
+        std::thread::sleep(Duration::from_secs(delay_secs));
+    }
 
     // fetch results from upstream search engines simultaneously/concurrently.
     let (ddg_map_results, searx_map_results) = join!(
diff --git a/src/server/routes.rs b/src/server/routes.rs
index 85c522d..0f84cc9 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -127,7 +127,7 @@ pub async fn search(
         }
         Err(_) => {
            let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
-                aggregate(query, page).await?;
+                aggregate(query, page, config.aggregator.random_delay).await?;
             results_json.add_style(config.style.clone());
             redis_cache
                 .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 916a9b3..7dfd515 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -19,3 +19,8 @@ theme = "simple" -- the theme name which should be used for the website
 
 -- Caching
 redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+
+-- Aggregator
+aggregator = {
+    random_delay = false, -- whether to add a random delay before sending the request to the search engine
+}
\ No newline at end of file
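For reference, a minimal self-contained sketch of the gating this patch introduces. The `maybe_delay` helper is illustrative only (it is not part of the codebase); it assumes the `rand` crate that `aggregate` already uses:

```rust
use std::time::Duration;

use rand::Rng;

/// Illustrative helper (not part of this patch): sleeps for a random
/// 1-9 seconds only when `random_delay` is enabled.
fn maybe_delay(random_delay: bool) {
    if random_delay {
        let mut rng = rand::thread_rng();
        // `gen_range(1..10)` samples the half-open range, i.e. 1..=9 seconds.
        let delay_secs: u64 = rng.gen_range(1..10);
        std::thread::sleep(Duration::from_secs(delay_secs));
    }
}

fn main() {
    // `false` matches the default shipped in config.lua above: no delay is added.
    maybe_delay(false);
    println!("request would be sent immediately");
}
```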
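One design note on the gated code: `std::thread::sleep` blocks the calling thread, so inside the async `aggregate` an enabled delay also stalls that executor thread for its full duration. If the surrounding runtime offers an async timer (e.g. a non-blocking `sleep(...).await`), swapping it in would avoid that; the on/off behaviour gated here is unchanged either way.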