Mirror of https://github.com/neon-mmd/websurfx.git, synced 2024-11-22 05:58:21 -05:00
Merge pull request #40 from XFFXFF/random_delay

feat: support the option to choose whether to enable or disable production mode.

commit 8904f34c6e

@@ -20,6 +20,14 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
+    pub aggregator: AggreatorConfig,
+}
+
+/// Configuration options for the aggregator.
+#[derive(Clone)]
+pub struct AggreatorConfig {
+    /// Whether to introduce a random delay before sending the request to the search engine.
+    pub random_delay: bool,
 }
 
 impl Config {
@@ -41,6 +49,15 @@ impl Config {
             .load(&fs::read_to_string("./websurfx/config.lua")?)
             .exec()?;
 
+        let production_use = globals.get::<_, bool>("production_use")?;
+        let aggregator_config = if production_use {
+            AggreatorConfig { random_delay: true }
+        } else {
+            AggreatorConfig {
+                random_delay: false,
+            }
+        };
+
         Ok(Config {
             port: globals.get::<_, u16>("port")?,
             binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -49,6 +66,7 @@ impl Config {
                 globals.get::<_, String>("colorscheme")?,
             ),
             redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+            aggregator: aggregator_config,
         })
     })
 }
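
The three hunks above teach the config parser a new `production_use` Lua global and carry it into the typed `Config` struct. A minimal standalone sketch of that load-then-read pattern, assuming the rlua crate (the `context`-style closure and `globals.get::<_, bool>(...)` calls above match its API); the inline Lua string stands in for ./websurfx/config.lua and the `main` wrapper is illustrative only:

use rlua::Lua;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let lua = Lua::new();
    let production_use = lua.context(|ctx| -> rlua::Result<bool> {
        // Execute the (inline) config script, then read a typed global out of it.
        ctx.load("production_use = true").exec()?;
        ctx.globals().get::<_, bool>("production_use")
    })?;
    // Mirrors the if/else in the hunk above: production mode switches the delay on.
    let random_delay = production_use;
    println!("random_delay = {random_delay}");
    Ok(())
}
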
@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
 ///
 /// * `query` - Accepts a string to query with the above upstream search engines.
 /// * `page` - Accepts an u32 page number.
+/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
 ///
 /// # Error
 ///
@@ -38,14 +39,17 @@ use crate::engines::{duckduckgo, searx};
 pub async fn aggregate(
     query: &str,
     page: u32,
+    random_delay: bool,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
 
     // Add a random delay before making the request.
+    if random_delay {
         let mut rng = rand::thread_rng();
         let delay_secs = rng.gen_range(1..10);
         std::thread::sleep(Duration::from_secs(delay_secs));
+    }
 
     // fetch results from upstream search engines simultaneously/concurrently.
     let (ddg_map_results, searx_map_results) = join!(
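
Pulled out of context, the gated delay reduces to the few lines below; a runnable sketch using the rand crate, where the `random_delay` literal stands in for the value threaded through from the config:

use rand::Rng;
use std::time::Duration;

fn main() {
    let random_delay = true; // in websurfx this arrives via config.aggregator.random_delay
    if random_delay {
        // gen_range(1..10) is half-open: the sleep lasts 1-9 whole seconds, never 10.
        let mut rng = rand::thread_rng();
        let delay_secs = rng.gen_range(1..10);
        std::thread::sleep(Duration::from_secs(delay_secs));
    }
    println!("upstream request would be sent here");
}

One design note: `std::thread::sleep` inside an `async fn` blocks the executor's worker thread for the whole delay; an async timer (e.g. `tokio::time::sleep(...).await`, if the surrounding runtime is tokio, which this diff does not show) would let other tasks run while waiting.
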
@@ -127,7 +127,7 @@ pub async fn search(
             }
             Err(_) => {
                 let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
-                    aggregate(query, page).await?;
+                    aggregate(query, page, config.aggregator.random_delay).await?;
                 results_json.add_style(config.style.clone());
                 redis_cache
                     .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
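
The routes hunk only changes the call site, but it completes the flow: the flag parsed from config.lua rides along as `config.aggregator.random_delay` into `aggregate`. A stubbed sketch of that wiring, with all types reduced to minimal stand-ins and tokio assumed purely so the example runs (the real runtime is not part of this diff):

struct AggreatorConfig { random_delay: bool }
struct Config { aggregator: AggreatorConfig }

// Stub with the same shape as the real aggregate(query, page, random_delay).
async fn aggregate(query: &str, page: u32, random_delay: bool) -> String {
    format!("results for '{query}', page {page}, random_delay={random_delay}")
}

#[tokio::main]
async fn main() {
    // production_use = false in config.lua  =>  random_delay: false here.
    let config = Config { aggregator: AggreatorConfig { random_delay: false } };
    let results = aggregate("rust search", 1, config.aggregator.random_delay).await;
    println!("{results}");
}
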
@@ -19,3 +19,7 @@ theme = "simple" -- the theme name which should be used for the website
 
 -- Caching
 redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+
+production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users)
+-- if production_use is set to true
+-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.