Mirror of https://github.com/neon-mmd/websurfx.git, synced 2024-11-21 13:38:21 -05:00
Merge pull request #13 from neon-mmd/optimise-code-for-server-use
optimise code for large scale server use and closes #7
Commit ab7348ba3d
@@ -2,9 +2,8 @@
 //! by querying the upstream duckduckgo search engine with user provided query and with a page
 //! number if provided.

-use std::{collections::HashMap, time::Duration};
+use std::collections::HashMap;

-use rand::Rng;
 use reqwest::header::{HeaderMap, CONTENT_TYPE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};

@@ -47,11 +46,6 @@ pub async fn results(
         }
     };

-    // Add a random delay before making the request.
-    let mut rng = rand::thread_rng();
-    let delay_secs = rng.gen_range(1, 10);
-    std::thread::sleep(Duration::from_secs(delay_secs));
-
     // initializing HeaderMap and adding appropriate headers.
     let mut header_map = HeaderMap::new();
     header_map.insert(USER_AGENT, user_agent.parse()?);
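The four removed lines above implemented a random pause of one to nine seconds with a blocking sleep before each duckduckgo request; the same block reappears once in the aggregator hunk further below, so after this change the delay happens per search rather than per engine. As a side note only (this is not part of the commit), a non-blocking variant of such a pause inside an async function could be sketched as follows, assuming tokio's time feature and the rand crate are available; gen_range with a range argument is the rand 0.8+ form of the two-argument call seen in the diff.

use rand::Rng;
use std::time::Duration;

// Hypothetical helper, not from the repository: pause 1-9 seconds without
// blocking the executor thread the way std::thread::sleep would.
async fn random_delay() {
    let delay_secs = rand::thread_rng().gen_range(1..10);
    tokio::time::sleep(Duration::from_secs(delay_secs)).await;
}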
@@ -2,10 +2,9 @@
 //! by querying the upstream searx search engine instance with user provided query and with a page
 //! number if provided.

-use rand::Rng;
 use reqwest::header::{HeaderMap, CONTENT_TYPE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};
-use std::{collections::HashMap, time::Duration};
+use std::collections::HashMap;

 use crate::search_results_handler::aggregation_models::RawSearchResult;

@@ -34,11 +33,6 @@ pub async fn results(
     // so that upstream server recieves valid page number.
     let url: String = format!("https://searx.work/search?q={query}&pageno={page}");

-    // Add random delay before making the request.
-    let mut rng = rand::thread_rng();
-    let delay_secs = rng.gen_range(1, 10);
-    std::thread::sleep(Duration::from_secs(delay_secs));
-
     // initializing headers and adding appropriate headers.
     let mut header_map = HeaderMap::new();
     header_map.insert(USER_AGENT, user_agent.parse()?);
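Both engine hunks end on the same context lines: with the delay gone, each module simply builds its header map and issues the request. The surrounding code is not part of this diff, so the snippet below is only a rough, hedged reconstruction of that shared flow, with placeholder header values and a boxed error type standing in for whatever the crate actually uses; it relies solely on the reqwest and scraper items already imported above.

use reqwest::header::{HeaderMap, CONTENT_TYPE, REFERER, USER_AGENT};
use scraper::Html;

// Illustrative only: fetch a results page and parse it for selector-based scraping.
async fn fetch_results_page(
    url: &str,
    user_agent: &str,
) -> Result<Html, Box<dyn std::error::Error>> {
    // initializing HeaderMap and adding appropriate headers (as in the context lines above).
    let mut header_map = HeaderMap::new();
    header_map.insert(USER_AGENT, user_agent.parse()?);
    header_map.insert(REFERER, "https://google.com/".parse()?); // placeholder value
    header_map.insert(CONTENT_TYPE, "application/x-www-form-urlencoded".parse()?); // placeholder value

    // fetch the page and hand the HTML to scraper.
    let body = reqwest::Client::new()
        .get(url)
        .headers(header_map)
        .send()
        .await?
        .text()
        .await?;
    Ok(Html::parse_document(&body))
}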
@@ -1,7 +1,10 @@
 //! This module provides the functionality to scrape and gathers all the results from the upstream
 //! search engines and then removes duplicate results.

-use std::collections::HashMap;
+use std::{collections::HashMap, time::Duration};

+use rand::Rng;
+use tokio::join;
+
 use super::{
     aggregation_models::{RawSearchResult, SearchResult, SearchResults},
@@ -14,14 +17,14 @@ use crate::engines::{duckduckgo, searx};
 /// then removes duplicate results and if two results are found to be from two or more engines
 /// then puts their names together to show the results are fetched from these upstream engines
 /// and then removes all data from the HashMap and puts into a struct of all results aggregated
-/// into a vector and also adds the query used into the struct this is neccessory because
+/// into a vector and also adds the query used into the struct this is neccessory because
 /// otherwise the search bar in search remains empty if searched from the query url
 ///
 /// # Example:
 ///
 /// If you search from the url like `https://127.0.0.1/search?q=huston` then the search bar should
 /// contain the word huston and not remain empty.
-///
+///
 /// # Arguments
 ///
 /// * `query` - Accepts a string to query with the above upstream search engines.
@@ -29,7 +32,7 @@ use crate::engines::{duckduckgo, searx};
 ///
 /// # Error
 ///
-/// Returns an error a reqwest and scraping selector errors if any error occurs in the results
+/// Returns an error a reqwest and scraping selector errors if any error occurs in the results
 /// function in either `searx` or `duckduckgo` or both otherwise returns a `SearchResults struct`
 /// containing appropriate values.
 pub async fn aggregate(
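The doc comment above describes the merging step: results from every engine land in one HashMap keyed by URL, and when two engines return the same result their names are combined so the frontend can display all sources. The real RawSearchResult fields are not visible in this diff, so the sketch below uses a simplified stand-in struct purely to illustrate that idea.

use std::collections::HashMap;

// Simplified stand-in for the crate's RawSearchResult; the actual fields may differ.
struct RawResult {
    title: String,
    engines: Vec<String>,
}

// Merge per-engine result maps keyed by URL, concatenating engine names on duplicates.
fn merge(maps: Vec<HashMap<String, RawResult>>) -> HashMap<String, RawResult> {
    let mut merged: HashMap<String, RawResult> = HashMap::new();
    for map in maps {
        for (url, result) in map {
            match merged.get_mut(&url) {
                // Same URL already seen from another engine: record the extra engine names.
                Some(existing) => existing.engines.extend(result.engines),
                // First occurrence of this URL: keep the result as-is.
                None => {
                    merged.insert(url, result);
                }
            }
        }
    }
    merged
}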
@@ -39,10 +42,19 @@ pub async fn aggregate(
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();

-    let ddg_map_results: HashMap<String, RawSearchResult> =
-        duckduckgo::results(query, page, &user_agent).await?;
-    let searx_map_results: HashMap<String, RawSearchResult> =
-        searx::results(query, page, &user_agent).await?;
+    // Add a random delay before making the request.
+    let mut rng = rand::thread_rng();
+    let delay_secs = rng.gen_range(1, 10);
+    std::thread::sleep(Duration::from_secs(delay_secs));
+
+    // fetch results from upstream search engines simultaneously/concurrently.
+    let (ddg_map_results, searx_map_results) = join!(
+        duckduckgo::results(query, page, &user_agent),
+        searx::results(query, page, &user_agent)
+    );
+
+    let ddg_map_results: HashMap<String, RawSearchResult> = ddg_map_results?;
+    let searx_map_results: HashMap<String, RawSearchResult> = searx_map_results?;

     result_map.extend(ddg_map_results);
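The last hunk is the heart of the optimisation: instead of awaiting duckduckgo::results and then searx::results one after the other, both futures are driven concurrently with tokio's join!, so a search waits roughly as long as the slower engine rather than the sum of both, and the errors are only unwrapped afterwards. A minimal, self-contained sketch of the same pattern, using stub fetchers in place of the real engine modules:

use std::collections::HashMap;
use tokio::join;

// Stub fetchers standing in for duckduckgo::results and searx::results.
async fn fetch_engine_a() -> Result<HashMap<String, String>, String> {
    Ok(HashMap::from([("https://a.example".to_string(), "result A".to_string())]))
}

async fn fetch_engine_b() -> Result<HashMap<String, String>, String> {
    Ok(HashMap::from([("https://b.example".to_string(), "result B".to_string())]))
}

#[tokio::main]
async fn main() -> Result<(), String> {
    // Both futures are polled concurrently; join! completes when the slower one finishes.
    let (a_results, b_results) = join!(fetch_engine_a(), fetch_engine_b());

    // Errors are surfaced only after the join, as in the aggregator hunk above.
    let mut result_map = a_results?;
    result_map.extend(b_results?);
    println!("{} results aggregated", result_map.len());
    Ok(())
}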