diff --git a/Cargo.lock b/Cargo.lock index 9f0a6d6..fdc506c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4066,7 +4066,7 @@ checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "websurfx" -version = "1.4.2" +version = "1.5.0" dependencies = [ "actix-cors", "actix-files", diff --git a/Cargo.toml b/Cargo.toml index 911aba3..edd1d3a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "websurfx" -version = "1.4.2" +version = "1.5.0" edition = "2021" description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind." repository = "https://github.com/neon-mmd/websurfx" diff --git a/src/engines/librex.rs b/src/engines/librex.rs new file mode 100644 index 0000000..933c7f2 --- /dev/null +++ b/src/engines/librex.rs @@ -0,0 +1,111 @@ +//! The `librex` module contains the implementation of a search engine for LibreX using the reqwest and scraper libraries. +//! It includes a `SearchEngine` trait implementation for interacting with the search engine and retrieving search results. + +use std::collections::HashMap; + +use reqwest::header::HeaderMap; +use reqwest::Client; +use scraper::Html; + +use crate::models::aggregation_models::SearchResult; +use crate::models::engine_models::{EngineError, SearchEngine}; + +use error_stack::{Report, Result, ResultExt}; + +use super::search_result_parser::SearchResultParser; + +/// Represents the LibreX search engine. +pub struct LibreX { + /// The parser used to extract search results from HTML documents. + parser: SearchResultParser, +} + +impl LibreX { + /// Creates a new instance of LibreX with a default configuration. + /// + /// # Returns + /// + /// Returns a `Result` containing `LibreX` if successful, otherwise an `EngineError`. 
+ pub fn new() -> Result<Self, EngineError> { + Ok(Self { + parser: SearchResultParser::new( + ".text-result-container>p", + ".text-result-container", + ".text-result-wrapper>a>h2", + ".text-result-wrapper>a", + ".text-result-wrapper>span", + )?, + }) + } +} + +#[async_trait::async_trait] +impl SearchEngine for LibreX { + /// Retrieves search results from LibreX based on the provided query, page, user agent, and client. + /// + /// # Arguments + /// + /// * `query` - The search query. + /// * `page` - The page number for pagination. + /// * `user_agent` - The user agent string. + /// * `client` - The reqwest client for making HTTP requests. + /// * `_safe_search` - A parameter for safe search (not currently used). + /// + /// # Returns + /// + /// Returns a `Result` containing a `HashMap` of search results if successful, otherwise an `EngineError`. + /// The `Err` variant is explicit for better documentation. + async fn results( + &self, + query: &str, + page: u32, + user_agent: &str, + client: &Client, + _safe_search: u8, + ) -> Result<HashMap<String, SearchResult>, EngineError> { + // Page number can be missing or empty string and so appropriate handling is required + // so that upstream server receives valid page number. + let url: String = match page { + 1 | 0 => { + format!("https://search.ahwx.org/search.php?q={query}&p=0&t=10") + } + _ => { + format!( + "https://search.ahwx.org/search.php?q={query}&p={}&t=10", + page * 10, + ) + } + }; + + // initializing HeaderMap and adding appropriate headers. 
+ let header_map = HeaderMap::try_from(&HashMap::from([ + ("USER_AGENT".to_string(), user_agent.to_string()), + ("REFERER".to_string(), "https://google.com/".to_string()), + ("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()), + ( + "COOKIE".to_string(), + "theme=amoled; disable_special=on; disable_frontends=on; language=en; number_of_results=10; safe_search=on; save=1".to_string(), + ), + ])) + .change_context(EngineError::UnexpectedError)?; + + let document: Html = Html::parse_document( + &LibreX::fetch_html_from_upstream(self, &url, header_map, client).await?, + ); + + if self.parser.parse_for_no_results(&document).next().is_some() { + return Err(Report::new(EngineError::EmptyResultSet)); + } + + // scrape all the results from the html + self.parser + .parse_for_results(&document, |title, url, desc| { + Some(SearchResult::new( + title.inner_html().trim(), + url.inner_html().trim(), + desc.inner_html().trim(), + &["librex"], + )) + }) + } +} diff --git a/src/engines/mod.rs b/src/engines/mod.rs index 53d720b..b6a50f5 100644 --- a/src/engines/mod.rs +++ b/src/engines/mod.rs @@ -5,6 +5,7 @@ pub mod brave; pub mod duckduckgo; +pub mod librex; pub mod search_result_parser; pub mod searx; pub mod startpage; diff --git a/src/models/engine_models.rs b/src/models/engine_models.rs index 1ab04ed..70496cd 100644 --- a/src/models/engine_models.rs +++ b/src/models/engine_models.rs @@ -158,6 +158,10 @@ impl EngineHandler { let engine = crate::engines::startpage::Startpage::new()?; ("startpage", Box::new(engine)) } + "librex" => { + let engine = crate::engines::librex::LibreX::new()?; + ("librex", Box::new(engine)) + } _ => { return Err(Report::from(EngineError::NoSuchEngineFound( engine_name.to_string(), diff --git a/websurfx/config.lua b/websurfx/config.lua index 22e2c4f..62ae412 100644 --- a/websurfx/config.lua +++ b/websurfx/config.lua @@ -54,4 +54,5 @@ upstream_search_engines = { Searx = false, Brave = false, Startpage = false, + LibreX = false, } 
-- select the upstream search engines from which the results should be fetched.