Mirror of https://github.com/neon-mmd/websurfx.git

⚙️ refactor: add several optimizations to the engine code (#180)(#178)

Author: neon_arch
Date: 2023-08-27 20:59:08 +03:00
Parent: 4ccd0486e7
Commit: f5f0488954

2 changed files with 36 additions and 75 deletions
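
The change is the same in both engine files below: owned `String` parameters become borrowed `&str`, the hand-rolled sequence of `header_map.insert(...)` calls collapses into one fallible `HeaderMap::try_from` conversion, and `IntoReport` is no longer needed. A minimal, self-contained sketch of the header-map conversion (illustrative names, assuming reqwest's re-exported http `HeaderMap`; not code from the commit):

use std::collections::HashMap;

use reqwest::header::HeaderMap;

// Illustrative helper: build the whole header map in one fallible
// conversion instead of several separate `insert` + `parse` calls.
fn build_headers(user_agent: &str) -> Result<HeaderMap, Box<dyn std::error::Error>> {
    let headers = HeaderMap::try_from(&HashMap::from([
        // All pairs are validated together; one bad header name or value
        // fails the whole conversion.
        ("user-agent".to_string(), user_agent.to_string()),
        ("referer".to_string(), "https://google.com/".to_string()),
    ]))?;
    Ok(headers)
}

fn main() {
    let headers = build_headers("websurfx/0.1").expect("static header names should parse");
    assert!(headers.contains_key("user-agent"));
}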

File: src/engines/duckduckgo.rs

@@ -4,14 +4,14 @@
 use std::collections::HashMap;
-use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
+use reqwest::header::HeaderMap;
 use scraper::{Html, Selector};
 use crate::results::aggregation_models::SearchResult;
 use super::engine_models::{EngineError, SearchEngine};
-use error_stack::{IntoReport, Report, Result, ResultExt};
+use error_stack::{Report, Result, ResultExt};
 /// A new DuckDuckGo engine type defined in-order to implement the `SearchEngine` trait which allows to
 /// reduce code duplication as well as allows to create vector of different search engines easily.
@@ -39,9 +39,9 @@ impl SearchEngine for DuckDuckGo {
     /// or HeaderMap fails to initialize.
     async fn results(
         &self,
-        query: String,
+        query: &str,
         page: u32,
-        user_agent: String,
+        user_agent: &str,
         request_timeout: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
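
The `String` → `&str` parameter change above is what lets one search query fan out to several engines without a fresh allocation per call. A toy contrast of the two signatures (hypothetical functions, not from the codebase):

// Old style: taking `String` forces each caller to clone or give up ownership.
fn results_owned(query: String) -> usize {
    query.len()
}

// New style: taking `&str` lets many engines borrow the same buffer.
fn results_borrowed(query: &str) -> usize {
    query.len()
}

fn main() {
    let query = String::from("rust search engine");
    let _ = results_owned(query.clone()); // N engines => N clones
    let _ = results_borrowed(&query); // N engines => zero extra allocations
    let _ = results_borrowed(&query);
}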
@@ -61,38 +61,19 @@ impl SearchEngine for DuckDuckGo {
         };
         // initializing HeaderMap and adding appropriate headers.
-        let mut header_map = HeaderMap::new();
-        header_map.insert(
-            USER_AGENT,
-            user_agent
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            REFERER,
-            "https://google.com/"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            CONTENT_TYPE,
-            "application/x-www-form-urlencoded"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            COOKIE,
-            "kl=wt-wt"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            (
+                "CONTENT_TYPE".to_string(),
+                "application/x-www-form-urlencoded".to_string(),
+            ),
+            ("COOKIE".to_string(), "kl=wt-wt".to_string()),
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
         let document: Html = Html::parse_document(
-            &DuckDuckGo::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
+            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
         );
         let no_result: Selector = Selector::parse(".no-results")
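
One subtlety of the map-based construction above, assuming the `http` crate's header-name parsing that reqwest re-exports: keys are lowercased but otherwise taken literally, so an underscored key such as "USER_AGENT" parses as the nonstandard header name `user_agent`, not the canonical `user-agent`. A quick check:

use reqwest::header::HeaderName;

fn main() {
    // Uppercase ASCII is normalized to lowercase...
    let underscored = HeaderName::try_from("USER_AGENT").unwrap();
    // ...but `_` is a legal header-name byte and is kept as-is, so this is
    // not the canonical User-Agent header.
    assert_eq!(underscored.as_str(), "user_agent");

    let dashed = HeaderName::try_from("User-Agent").unwrap();
    assert_eq!(dashed.as_str(), "user-agent"); // canonical form
}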
@@ -126,8 +107,7 @@ impl SearchEngine for DuckDuckGo {
                        .next()
                        .unwrap()
                        .inner_html()
-                        .trim()
-                        .to_string(),
+                        .trim(),
                    format!(
                        "https://{}",
                        result
@@ -136,15 +116,15 @@ impl SearchEngine for DuckDuckGo {
                            .unwrap()
                            .inner_html()
                            .trim()
-                    ),
+                    )
+                    .as_str(),
                    result
                        .select(&result_desc)
                        .next()
                        .unwrap()
                        .inner_html()
-                        .trim()
-                        .to_string(),
-                    vec!["duckduckgo".to_string()],
+                        .trim(),
+                    &["duckduckgo"],
                )
            })
            .map(|search_result| (search_result.url.clone(), search_result))
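
The new call sites pass `&str` arguments and a `&["duckduckgo"]` slice, which implies `SearchResult::new` was changed to borrow its inputs and do the owning conversions itself. A hypothetical reconstruction of that constructor (the real definition lives in the results module and is not part of this diff):

// Hypothetical shape implied by the call sites in this commit.
pub struct SearchResult {
    pub title: String,
    pub url: String,
    pub description: String,
    pub engine: Vec<String>,
}

impl SearchResult {
    pub fn new(title: &str, url: &str, description: &str, relevant_engines: &[&str]) -> Self {
        SearchResult {
            // The constructor owns the data once, instead of every call
            // site sprinkling `.to_string()` on each argument.
            title: title.to_owned(),
            url: url.to_owned(),
            description: description.to_owned(),
            engine: relevant_engines.iter().map(|e| e.to_string()).collect(),
        }
    }
}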

File: src/engines/searx.rs

@@ -2,14 +2,14 @@
 //! by querying the upstream searx search engine instance with user provided query and with a page
 //! number if provided.
-use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
+use reqwest::header::HeaderMap;
 use scraper::{Html, Selector};
 use std::collections::HashMap;
 use crate::results::aggregation_models::SearchResult;
 use super::engine_models::{EngineError, SearchEngine};
-use error_stack::{IntoReport, Report, Result, ResultExt};
+use error_stack::{Report, Result, ResultExt};
 /// A new Searx engine type defined in-order to implement the `SearchEngine` trait which allows to
 /// reduce code duplication as well as allows to create vector of different search engines easily.
@@ -38,9 +38,9 @@ impl SearchEngine for Searx {
     async fn results(
         &self,
-        query: String,
+        query: &str,
         page: u32,
-        user_agent: String,
+        user_agent: &str,
         request_timeout: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
@@ -51,32 +51,16 @@ impl SearchEngine for Searx {
         };
         // initializing headers and adding appropriate headers.
-        let mut header_map = HeaderMap::new();
-        header_map.insert(
-            USER_AGENT,
-            user_agent
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            REFERER,
-            "https://google.com/"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            CONTENT_TYPE,
-            "application/x-www-form-urlencoded"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse().into_report().change_context(EngineError::UnexpectedError)?);
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            ("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()),
+            ("COOKIE".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
         let document: Html = Html::parse_document(
-            &Searx::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
+            &Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
         );
         let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
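
The long COOKIE value above is littered with `\054` sequences. Assuming searx follows Python's `http.cookies` escaping, `\054` is the octal escape for a comma inside a quoted cookie value, which is how the `enabled_engines` list packs many entries into a single cookie. A small sketch of decoding such a list:

// Sketch of undoing the `\054` comma escaping in a quoted cookie value
// (assumption: Python http.cookies-style escaping on the searx side).
fn unescape_cookie_list(raw: &str) -> Vec<String> {
    raw.trim_matches('"')
        .split("\\054") // the literal four-character sequence \054
        .map(str::to_string)
        .collect()
}

fn main() {
    let engines = unescape_cookie_list("\"duckduckgo__general\\054brave__general\"");
    assert_eq!(engines, vec!["duckduckgo__general", "brave__general"]);
}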
@@ -117,24 +101,21 @@ impl SearchEngine for Searx {
                        .next()
                        .unwrap()
                        .inner_html()
-                        .trim()
-                        .to_string(),
+                        .trim(),
                    result
                        .select(&result_url)
                        .next()
                        .unwrap()
                        .value()
                        .attr("href")
-                        .unwrap()
-                        .to_string(),
+                        .unwrap(),
                    result
                        .select(&result_desc)
                        .next()
                        .unwrap()
                        .inner_html()
-                        .trim()
-                        .to_string(),
-                    vec!["searx".to_string()],
+                        .trim(),
+                    &["searx"],
                )
            })
            .map(|search_result| (search_result.url.clone(), search_result))