diff --git a/src/config_parser/parser.rs b/src/config_parser/parser.rs
index ac200cd..4c5c1e6 100644
--- a/src/config_parser/parser.rs
+++ b/src/config_parser/parser.rs
@@ -118,7 +118,7 @@ impl Config {
         {
             Ok("./websurfx/config.lua".to_string())
         } else {
-            Err(format!("Config file not found!!").into())
+            Err("Config file not found!!".to_string().into())
         }
     }
 }
diff --git a/src/engines/duckduckgo.rs b/src/engines/duckduckgo.rs
index 254ab16..64c34c3 100644
--- a/src/engines/duckduckgo.rs
+++ b/src/engines/duckduckgo.rs
@@ -2,13 +2,15 @@
 //! by querying the upstream duckduckgo search engine with user provided query and with a page
 //! number if provided.
 
-use std::collections::HashMap;
+use std::{collections::HashMap, time::Duration};
 
 use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -22,14 +24,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns a reqwest error if the user is not connected to the internet or if their is failure to
-/// reach the above `upstream search engine` page and also returns error if the scraping
-/// selector fails to initialize"
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>, Box<dyn std::error::Error>> {
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server recieves valid page number.
     let url: String = match page {
@@ -54,9 +57,9 @@ pub async fn results(
     header_map.insert(COOKIE, "kl=wt-wt".parse()?);
 
     // fetch the html from upstream duckduckgo engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
+        .timeout(Duration::from_secs(30))
         .headers(header_map) // add spoofed headers to emulate human behaviour
         .send()
         .await?
@@ -64,6 +67,13 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse(".no-results")?;
+
+    if document.select(&no_result).next().is_some() {
+        return Err(EngineErrorKind::EmptyResultSet);
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse(".result__a")?;
     let result_url: Selector = Selector::parse(".result__url")?;
diff --git a/src/engines/engine_models.rs b/src/engines/engine_models.rs
new file mode 100644
index 0000000..e763852
--- /dev/null
+++ b/src/engines/engine_models.rs
@@ -0,0 +1,87 @@
+//! This module provides the error enum to handle different errors associated while requesting data from
+//! the upstream search engines with the search query provided by the user.
+
+use reqwest::header::InvalidHeaderValue;
+use scraper::error::SelectorErrorKind;
+
+/// A custom error type used for handle engine associated errors.
+///
+/// This enum provides variants three different categories of errors:
+/// * `RequestError` - This variant handles all request related errors like forbidden, not found,
+/// etc.
+/// * `EmptyResultSet` - This variant handles the not results found error provide by the upstream
+/// search engines.
+/// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
+/// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
+/// all other errors occuring within the code handling the `upstream search engines`.
+#[derive(Debug)]
+pub enum EngineErrorKind {
+    RequestError(reqwest::Error),
+    EmptyResultSet,
+    UnexpectedError {
+        message: String,
+        source: Option<Box<dyn std::error::Error>>,
+    },
+}
+
+/// Implementing `Display` trait to make errors writable on the stdout and also providing/passing the
+/// appropriate errors that should be written to the stdout when this error is raised/encountered.
+impl std::fmt::Display for EngineErrorKind {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            EngineErrorKind::RequestError(request_error) => {
+                write!(f, "Request error: {}", request_error)
+            }
+            EngineErrorKind::EmptyResultSet => {
+                write!(f, "The upstream search engine returned an empty result set")
+            }
+            EngineErrorKind::UnexpectedError { message, source } => {
+                write!(f, "Unexpected error: {}", message)?;
+                if let Some(source) = source {
+                    write!(f, "\nCaused by: {}", source)?;
+                }
+                Ok(())
+            }
+        }
+    }
+}
+
+/// Implementing `Error` trait to make the the `EngineErrorKind` enum an error type and
+/// mapping `ReqwestErrors` to `RequestError` and `UnexpectedError` errors to all other unexpected
+/// errors ocurring within the code handling the upstream search engines.
+impl std::error::Error for EngineErrorKind {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        match self {
+            EngineErrorKind::RequestError(request_error) => Some(request_error),
+            EngineErrorKind::UnexpectedError { source, .. } => source.as_deref().map(|s| s),
+            _ => None,
+        }
+    }
+}
+
+/// Implementing `From` trait to map the `SelectorErrorKind` to `UnexpectedError` variant.
+impl From<SelectorErrorKind<'_>> for EngineErrorKind {
+    fn from(err: SelectorErrorKind<'_>) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: None,
+        }
+    }
+}
+
+/// Implementing `From` trait to map the `InvalidHeaderValue` to `UnexpectedError` variant.
+impl From<InvalidHeaderValue> for EngineErrorKind {
+    fn from(err: InvalidHeaderValue) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: Some(Box::new(err)),
+        }
+    }
+}
+
+/// Implementing `From` trait to map all `reqwest::Error` to `UnexpectedError` variant.
+impl From<reqwest::Error> for EngineErrorKind {
+    fn from(err: reqwest::Error) -> Self {
+        Self::RequestError(err)
+    }
+}
diff --git a/src/engines/mod.rs b/src/engines/mod.rs
index 7f390b1..f9bb8ad 100644
--- a/src/engines/mod.rs
+++ b/src/engines/mod.rs
@@ -1,2 +1,3 @@
 pub mod duckduckgo;
+pub mod engine_models;
 pub mod searx;
diff --git a/src/engines/searx.rs b/src/engines/searx.rs
index 8812dd9..feab464 100644
--- a/src/engines/searx.rs
+++ b/src/engines/searx.rs
@@ -8,6 +8,8 @@ use std::collections::HashMap;
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -21,14 +23,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns a reqwest error if the user is not connected to the internet or if their is failure to
-/// reach the above `upstream search engine` page and also returns error if the scraping
-/// selector fails to initialize"
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>, Box<dyn std::error::Error>> {
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server recieves valid page number.
     let url: String = format!("https://searx.work/search?q={query}&pageno={page}");
@@ -41,7 +44,6 @@ pub async fn results(
     header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse()?);
 
     // fetch the html from upstream searx instance engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
         .headers(header_map) // add spoofed headers to emulate human behaviours.
@@ -51,6 +53,17 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse("#urls>.dialog-error>p")?;
+
+    if let Some(no_result_msg) = document.select(&no_result).nth(1) {
+        if no_result_msg.inner_html()
+            == "we didn't find any results. Please use another query or search in more categories"
+        {
+            return Err(EngineErrorKind::EmptyResultSet);
+        }
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse("h3>a")?;
     let result_url: Selector = Selector::parse("h3>a")?;