
supports the option to add a random delay

XFFXFF 2023-05-22 01:13:06 +00:00
parent 018f92526e
commit 05272884ba
4 changed files with 26 additions and 4 deletions

View File

@@ -20,6 +20,14 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
+    pub aggregator: AggreatorConfig,
 }
 
+/// Configuration options for the aggregator.
+#[derive(Clone)]
+pub struct AggreatorConfig {
+    /// Whether to introduce a random delay before sending the request to the search engine.
+    pub random_delay: bool,
+}
+
 impl Config {
@@ -41,6 +49,8 @@ impl Config {
                 .load(&fs::read_to_string("./websurfx/config.lua")?)
                 .exec()?;
 
+            let aggregator_config = globals.get::<_, rlua::Table>("aggregator")?;
+
             Ok(Config {
                 port: globals.get::<_, u16>("port")?,
                 binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -49,6 +59,9 @@ impl Config {
                     globals.get::<_, String>("colorscheme")?,
                 ),
                 redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+                aggregator: AggreatorConfig {
+                    random_delay: aggregator_config.get::<_, bool>("random_delay")?,
+                },
             })
         })
     }
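
The parsing side above follows the usual rlua pattern: execute the Lua config, pull the aggregator global out as a table, then read typed fields from it. A minimal self-contained sketch of that pattern (the inline Lua snippet and variable names here are illustrative, not from the commit):

use rlua::Lua;

fn main() -> Result<(), rlua::Error> {
    let lua = Lua::new();
    lua.context(|ctx| {
        // Stand-in for loading websurfx/config.lua from disk.
        ctx.load("aggregator = { random_delay = true }").exec()?;
        // Fetch the `aggregator` table from the Lua globals, then read
        // the boolean field out of it, as Config::parse does above.
        let aggregator: rlua::Table = ctx.globals().get("aggregator")?;
        let random_delay: bool = aggregator.get("random_delay")?;
        assert!(random_delay);
        Ok(())
    })
}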

View File

@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
 ///
 /// * `query` - Accepts a string to query with the above upstream search engines.
 /// * `page` - Accepts an u32 page number.
+/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
 ///
 /// # Error
 ///
@@ -38,14 +39,17 @@ use crate::engines::{duckduckgo, searx};
 pub async fn aggregate(
     query: &str,
     page: u32,
+    random_delay: bool,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
 
     // Add a random delay before making the request.
-    let mut rng = rand::thread_rng();
-    let delay_secs = rng.gen_range(1..10);
-    std::thread::sleep(Duration::from_secs(delay_secs));
+    if random_delay {
+        let mut rng = rand::thread_rng();
+        let delay_secs = rng.gen_range(1..10);
+        std::thread::sleep(Duration::from_secs(delay_secs));
+    }
 
     // fetch results from upstream search engines simultaneously/concurrently.
     let (ddg_map_results, searx_map_results) = join!(
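
Extracted from the diff, the new gating logic is small enough to read in isolation. A self-contained sketch (assuming the rand crate, which the surrounding code already uses):

use rand::Rng;
use std::time::Duration;

fn maybe_delay(random_delay: bool) {
    if random_delay {
        let mut rng = rand::thread_rng();
        // gen_range takes a half-open range, so this picks an integer 1..=9.
        let delay_secs: u64 = rng.gen_range(1..10);
        std::thread::sleep(Duration::from_secs(delay_secs));
    }
}

One design note: std::thread::sleep blocks the calling thread, so inside the async aggregate function the delay also stalls that executor thread for its full duration; an async timer (e.g. tokio's sleep) would yield to other tasks instead.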

View File

@@ -127,7 +127,7 @@ pub async fn search(
                 }
                 Err(_) => {
                     let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
-                        aggregate(query, page).await?;
+                        aggregate(query, page, config.aggregator.random_delay).await?;
                     results_json.add_style(config.style.clone());
                     redis_cache
                         .cache_results(serde_json::to_string(&results_json)?, &page_url)?;

View File

@ -19,3 +19,8 @@ theme = "simple" -- the theme name which should be used for the website
-- Caching
redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
-- Aggregator
aggregator = {
random_delay = false, -- whether to add random delay before sending the request to the search engine
}
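
The shipped default keeps the feature off. Setting random_delay = true in websurfx/config.lua turns it on: every search that misses the Redis cache then waits a random 1-9 seconds before querying the upstream engines, as wired through the aggregate call above.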