
Merge branch 'rolling' into optimise-releases

zhou fan 2023-06-06 07:09:37 +08:00 committed by GitHub
commit 2cdfadcb2b
6 changed files with 124 additions and 13 deletions

View File

@@ -4,7 +4,7 @@ function search_web() {
 }
 
 search_box.addEventListener('keyup', (e) => {
-    if (e.keyCode === 13) {
+    if (e.key === 'Enter') {
         search_web()
     }
 })

View File

@@ -118,7 +118,7 @@ impl Config {
         {
             Ok("./websurfx/config.lua".to_string())
         } else {
-            Err(format!("Config file not found!!").into())
+            Err("Config file not found!!".to_string().into())
         }
     }
 }
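The replacement drops the `format!` call, which allocates a `String` but interpolates nothing; clippy flags the old form with its `useless_format` lint. A minimal standalone sketch showing the two forms are equivalent (the function names here are illustrative, not from the codebase):

use std::error::Error;

// Both forms build the same String and box it into `Box<dyn Error>`
// via the `From<String>` impl; only the new form is lint-clean.
fn old_style() -> Result<String, Box<dyn Error>> {
    Err(format!("Config file not found!!").into())
}

fn new_style() -> Result<String, Box<dyn Error>> {
    Err("Config file not found!!".to_string().into())
}

fn main() {
    assert_eq!(
        old_style().unwrap_err().to_string(),
        new_style().unwrap_err().to_string()
    );
}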

View File

@@ -2,13 +2,15 @@
 //! by querying the upstream duckduckgo search engine with user provided query and with a page
 //! number if provided.
 
-use std::collections::HashMap;
+use std::{collections::HashMap, time::Duration};
 
 use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -22,14 +24,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns a reqwest error if the user is not connected to the internet or if their is failure to
-/// reach the above `upstream search engine` page and also returns error if the scraping
-/// selector fails to initialize"
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if there is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>, Box<dyn std::error::Error>> {
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server receives valid page number.
     let url: String = match page {
@@ -54,9 +57,9 @@ pub async fn results(
     header_map.insert(COOKIE, "kl=wt-wt".parse()?);
 
     // fetch the html from upstream duckduckgo engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
+        .timeout(Duration::from_secs(30))
         .headers(header_map) // add spoofed headers to emulate human behaviour
         .send()
         .await?
@@ -64,6 +67,13 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse(".no-results")?;
+
+    if document.select(&no_result).next().is_some() {
+        return Err(EngineErrorKind::EmptyResultSet);
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse(".result__a")?;
     let result_url: Selector = Selector::parse(".result__url")?;
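A condensed, standalone sketch of the request pattern introduced here, assuming tokio and reqwest as dependencies (the query URL is illustrative): the per-request timeout turns a stalled upstream into an ordinary `reqwest::Error`, which `?` then converts into `EngineErrorKind::RequestError` via the `From` impl defined in engine_models.

use std::time::Duration;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // A stalled or unreachable upstream now fails after 30 seconds
    // instead of hanging the handler indefinitely.
    let body: String = reqwest::Client::new()
        .get("https://duckduckgo.com/html/?q=rust")
        .timeout(Duration::from_secs(30))
        .send()
        .await?
        .text()
        .await?;
    println!("fetched {} bytes", body.len());
    Ok(())
}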

View File

@@ -0,0 +1,87 @@
+//! This module provides the error enum to handle different errors associated while requesting data from
+//! the upstream search engines with the search query provided by the user.
+
+use reqwest::header::InvalidHeaderValue;
+use scraper::error::SelectorErrorKind;
+
+/// A custom error type used for handling engine associated errors.
+///
+/// This enum provides variants for three different categories of errors:
+/// * `RequestError` - This variant handles all request related errors like forbidden, not found,
+/// etc.
+/// * `EmptyResultSet` - This variant handles the no results found error provided by the upstream
+/// search engines.
+/// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
+/// and are mostly related to failure in initialization of HeaderMap, Selector errors and
+/// all other errors occurring within the code handling the `upstream search engines`.
+#[derive(Debug)]
+pub enum EngineErrorKind {
+    RequestError(reqwest::Error),
+    EmptyResultSet,
+    UnexpectedError {
+        message: String,
+        source: Option<Box<dyn std::error::Error>>,
+    },
+}
+
+/// Implementing the `Display` trait to make errors writable on the stdout and also providing the
+/// appropriate error messages that should be written to the stdout when this error is raised/encountered.
+impl std::fmt::Display for EngineErrorKind {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            EngineErrorKind::RequestError(request_error) => {
+                write!(f, "Request error: {}", request_error)
+            }
+            EngineErrorKind::EmptyResultSet => {
+                write!(f, "The upstream search engine returned an empty result set")
+            }
+            EngineErrorKind::UnexpectedError { message, source } => {
+                write!(f, "Unexpected error: {}", message)?;
+                if let Some(source) = source {
+                    write!(f, "\nCaused by: {}", source)?;
+                }
+                Ok(())
+            }
+        }
+    }
+}
+
+/// Implementing the `Error` trait to make the `EngineErrorKind` enum an error type, exposing the
+/// wrapped `reqwest::Error` as the source of a `RequestError` and the optional boxed error as the
+/// source of an `UnexpectedError`.
+impl std::error::Error for EngineErrorKind {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        match self {
+            EngineErrorKind::RequestError(request_error) => Some(request_error),
+            EngineErrorKind::UnexpectedError { source, .. } => source.as_deref(),
+            _ => None,
+        }
+    }
+}
+
+/// Implementing the `From` trait to map `SelectorErrorKind` to the `UnexpectedError` variant.
+impl From<SelectorErrorKind<'_>> for EngineErrorKind {
+    fn from(err: SelectorErrorKind<'_>) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: None,
+        }
+    }
+}
+
+/// Implementing the `From` trait to map `InvalidHeaderValue` to the `UnexpectedError` variant.
+impl From<InvalidHeaderValue> for EngineErrorKind {
+    fn from(err: InvalidHeaderValue) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: Some(Box::new(err)),
+        }
+    }
+}
+
+/// Implementing the `From` trait to map all `reqwest::Error`s to the `RequestError` variant.
+impl From<reqwest::Error> for EngineErrorKind {
+    fn from(err: reqwest::Error) -> Self {
+        Self::RequestError(err)
+    }
+}
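Two brief usage sketches, assuming `EngineErrorKind` and its `From` impls above are in scope; `parse_result_selector` and `handle` are illustrative names, not part of the codebase. The first shows `?` converting a scraper `SelectorErrorKind` into `UnexpectedError` automatically; the second shows a caller telling the three error categories apart.

use scraper::Selector;

// `?` goes through `From<SelectorErrorKind<'_>>`, so a bad CSS selector
// surfaces as `EngineErrorKind::UnexpectedError` with no manual mapping.
fn parse_result_selector() -> Result<Selector, EngineErrorKind> {
    Ok(Selector::parse(".result")?)
}

// Caller-side dispatch over the three error categories.
fn handle(outcome: Result<(), EngineErrorKind>) {
    match outcome {
        Ok(()) => println!("got results"),
        Err(EngineErrorKind::EmptyResultSet) => println!("no results for this query"),
        Err(EngineErrorKind::RequestError(e)) => eprintln!("network failure: {}", e),
        Err(e @ EngineErrorKind::UnexpectedError { .. }) => eprintln!("internal failure: {}", e),
    }
}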

View File

@@ -1,2 +1,3 @@
 pub mod duckduckgo;
+pub mod engine_models;
 pub mod searx;

View File

@@ -8,6 +8,8 @@ use std::collections::HashMap;
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine searx and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -21,14 +23,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns a reqwest error if the user is not connected to the internet or if their is failure to
-/// reach the above `upstream search engine` page and also returns error if the scraping
-/// selector fails to initialize"
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if there is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>, Box<dyn std::error::Error>> {
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server receives valid page number.
     let url: String = format!("https://searx.work/search?q={query}&pageno={page}");
@@ -41,7 +44,6 @@ pub async fn results(
     header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse()?);
 
     // fetch the html from upstream searx instance engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
         .headers(header_map) // add spoofed headers to emulate human behaviours.
@@ -51,6 +53,17 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse("#urls>.dialog-error>p")?;
+
+    if let Some(no_result_msg) = document.select(&no_result).nth(1) {
+        if no_result_msg.inner_html()
+            == "we didn't find any results. Please use another query or search in more categories"
+        {
+            return Err(EngineErrorKind::EmptyResultSet);
+        }
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse("h3>a")?;
     let result_url: Selector = Selector::parse("h3>a")?;
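A standalone sketch of the no-results check above, run against a canned HTML fragment; the markup is a guess at the minimal structure the `#urls>.dialog-error>p` selector matches, not a captured searx page. `.nth(1)` skips the dialog's first `<p>` and reads the second, which carries the message text.

use scraper::{Html, Selector};

fn main() {
    // Hypothetical minimal shape of searx's error dialog.
    let html = r#"
        <div id="urls">
            <div class="dialog-error">
                <p>Error!</p>
                <p>we didn't find any results. Please use another query or search in more categories</p>
            </div>
        </div>"#;
    let document = Html::parse_document(html);
    let no_result = Selector::parse("#urls>.dialog-error>p").unwrap();

    // Same check as in `results`: the second <p> holds the message.
    let is_empty = document
        .select(&no_result)
        .nth(1)
        .map(|p| {
            p.inner_html()
                == "we didn't find any results. Please use another query or search in more categories"
        })
        .unwrap_or(false);
    assert!(is_empty);
}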