diff --git a/src/search_results_handler/aggregator.rs b/src/search_results_handler/aggregator.rs
index aed48aa..197c3d7 100644
--- a/src/search_results_handler/aggregator.rs
+++ b/src/search_results_handler/aggregator.rs
@@ -8,7 +8,7 @@ use rand::Rng;
 use tokio::task::JoinHandle;
 
 use super::{
-    aggregation_models::{RawSearchResult, SearchResult, SearchResults},
+    aggregation_models::{EngineErrorInfo, RawSearchResult, SearchResult, SearchResults},
     user_agent::random_user_agent,
 };
 
@@ -18,6 +18,8 @@ use crate::engines::{
     searx,
 };
 
+type FutureVec = Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<EngineError>>>>;
+
 /// A function that aggregates all the scraped results from the above upstream engines and
 /// then removes duplicate results and if two results are found to be from two or more engines
 /// then puts their names together to show the results are fetched from these upstream engines
@@ -70,49 +72,48 @@ pub async fn aggregate(
 
     let task_capacity: usize = search_engines.len();
 
-    let tasks: Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<EngineError>>>> =
-        search_engines
-            .into_iter()
-            .map(|search_engine| {
-                let query: String = query.clone();
-                let user_agent: String = user_agent.clone();
-                tokio::spawn(
-                    async move { search_engine.results(query, page, user_agent.clone()).await },
-                )
-            })
-            .collect();
+    let tasks: FutureVec = search_engines
+        .into_iter()
+        .map(|search_engine| {
+            let query: String = query.clone();
+            let user_agent: String = user_agent.clone();
+            tokio::spawn(
+                async move { search_engine.results(query, page, user_agent.clone()).await },
+            )
+        })
+        .collect();
 
     let mut outputs = Vec::with_capacity(task_capacity);
 
     for task in tasks {
         if let Ok(result) = task.await {
-            outputs.push(result.ok())
+            outputs.push(result)
         }
     }
 
+    let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new();
+
     let mut initial: bool = true;
     let mut counter: usize = 0;
     outputs.iter().for_each(|results| {
         if initial {
             match results {
-                Some(result) => {
+                Ok(result) => {
                     result_map.extend(result.clone());
                     counter += 1;
                     initial = false
                 }
-                None => {
-                    if debug {
-                        log::error!(
-                            "Error fetching results from {}",
-                            upstream_search_engines[counter]
-                        );
-                    };
+                Err(error_type) => {
+                    engine_errors_info.push(EngineErrorInfo::new(
+                        error_type.downcast_ref::<EngineError>().unwrap(),
+                        upstream_search_engines[counter].clone(),
+                    ));
                     counter += 1
                 }
             }
         } else {
             match results {
-                Some(result) => {
+                Ok(result) => {
                     result.clone().into_iter().for_each(|(key, value)| {
                         result_map
                             .entry(key)
@@ -130,13 +131,11 @@ pub async fn aggregate(
                     });
                     counter += 1
                 }
-                None => {
-                    if debug {
-                        log::error!(
-                            "Error fetching results from {}",
-                            upstream_search_engines[counter]
-                        );
-                    };
+                Err(error_type) => {
+                    engine_errors_info.push(EngineErrorInfo::new(
+                        error_type.downcast_ref::<EngineError>().unwrap(),
+                        upstream_search_engines[counter].clone(),
+                    ));
                     counter += 1
                 }
             }
@@ -157,5 +156,6 @@ pub async fn aggregate(
             })
             .collect(),
         query.to_string(),
+        engine_errors_info,
     ))
 }