From 0d2d44988970225323cb35225d20e1b9443fae46 Mon Sep 17 00:00:00 2001
From: neon_arch
Date: Sun, 3 Sep 2023 19:21:23 +0300
Subject: [PATCH 1/6] =?UTF-8?q?=E2=9A=99=EF=B8=8F=20refactor:=20add=20lint?=
 =?UTF-8?q?s=20to=20the=20codebase=20to=20ensure=20proper=20code=20style?=
 =?UTF-8?q?=20(#205)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib.rs | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index cd83d8a..52fb56d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,10 @@
 //! This main library module provides the functionality to provide and handle the Tcp server
 //! and register all the routes for the `websurfx` meta search engine website.
 
+#![forbid(unsafe_code, clippy::panic)]
+#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
+#![warn(clippy::cognitive_complexity, rust_2018_idioms)]
+
 pub mod cache;
 pub mod config;
 pub mod engines;
@@ -40,7 +44,7 @@ use handler::paths::{file_path, FileType};
 /// let server = run(listener,config).expect("Failed to start server");
 /// ```
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
-    let mut handlebars: Handlebars = Handlebars::new();
+    let mut handlebars: Handlebars<'_> = Handlebars::new();
 
     let public_folder_path: String = file_path(FileType::Theme)?;
 
@@ -48,7 +52,7 @@ pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))
         .unwrap();
 
-    let handlebars_ref: web::Data<Handlebars> = web::Data::new(handlebars);
+    let handlebars_ref: web::Data<Handlebars<'_>> = web::Data::new(handlebars);
 
     let cloned_config_threads_opt: u8 = config.threads;
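The three crate-level attributes added above are the backbone of the whole series: `forbid` can never be overridden further down the crate, `deny` turns the listed lints into hard errors, and `warn` only reports. Denying `missing_docs` together with `clippy::missing_docs_in_private_items` is what drives the follow-up patches to move the old `# Fields` and `# Arguments` bullet lists onto the items themselves. A minimal sketch of the resulting style, using a hypothetical `ServerOptions` struct rather than any websurfx type (note that the `clippy::` lints only take effect under `cargo clippy`, not plain `rustc`):

//! A toy library demonstrating the documentation lints used above.

#![forbid(unsafe_code)]
#![deny(missing_docs, clippy::missing_docs_in_private_items)]
#![warn(rust_2018_idioms)]

/// A named struct which stores hypothetical server options.
pub struct ServerOptions {
    /// It stores the port number on which the server should launch.
    pub port: u16,
    /// It stores the flag which enables verbose logging. Deleting this doc
    /// comment fails `cargo clippy` even though the field is private.
    debug: bool,
}

impl ServerOptions {
    /// Constructs a new `ServerOptions` with logging disabled.
    pub fn new(port: u16) -> Self {
        Self { port, debug: false }
    }

    /// Returns the configured port, logging it first when `debug` is set.
    pub fn port(&self) -> u16 {
        if self.debug {
            eprintln!("returning port {}", self.port);
        }
        self.port
    }
}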
From 049b1c1ddda15d7b109b0be8c2942b8f60736f33 Mon Sep 17 00:00:00 2001
From: neon_arch
Date: Sun, 3 Sep 2023 19:23:34 +0300
Subject: [PATCH 2/6] =?UTF-8?q?=E2=9A=99=EF=B8=8F=20refactor:=20change=20&?=
 =?UTF-8?q?=20add=20documentation=20to=20the=20code=20based=20on=20the=20l?=
 =?UTF-8?q?ints=20(#205)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/cache/cacher.rs               |  5 +--
 src/cache/mod.rs                  |  3 ++
 src/config/mod.rs                 |  3 ++
 src/config/parser.rs              | 37 ++++++++---------
 src/config/parser_models.rs       |  9 ++--
 src/engines/duckduckgo.rs         | 20 +--------
 src/engines/engine_models.rs      | 68 ++++++++++++++++++++++++++-----
 src/engines/mod.rs                |  5 +++
 src/engines/searx.rs              | 19 ---------
 src/handler/mod.rs                |  4 ++
 src/handler/paths.rs              | 46 +++++++++++----------
 src/results/aggregation_models.rs | 40 ++++++++++--------
 src/results/mod.rs                |  4 ++
 src/results/user_agent.rs         |  2 +
 src/server/mod.rs                 |  5 +++
 src/server/routes.rs              | 39 ++++++++++--------
 16 files changed, 177 insertions(+), 132 deletions(-)

diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs
index 44d0710..e268ac2 100644
--- a/src/cache/cacher.rs
+++ b/src/cache/cacher.rs
@@ -6,11 +6,8 @@ use redis::{Client, Commands, Connection};
 
 /// A named struct which stores the redis Connection url address to which the client will
 /// connect to.
-///
-/// # Fields
-///
-/// * `redis_connection_url` - It stores the redis Connection url address.
 pub struct RedisCache {
+    /// It stores the redis Connection url address.
    connection: Connection,
 }
 
diff --git a/src/cache/mod.rs b/src/cache/mod.rs
index de7dd4e..82e3377 100644
--- a/src/cache/mod.rs
+++ b/src/cache/mod.rs
@@ -1 +1,4 @@
+//! This module provides the modules which provide the functionality to cache the aggregated
+//! results fetched and aggregated from the upstream search engines in a json format.
+
 pub mod cacher;

diff --git a/src/config/mod.rs b/src/config/mod.rs
index 11ce559..331a3d7 100644
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -1,2 +1,5 @@
+//! This module provides the modules which handle the functionality to parse the lua config
+//! and convert the config options into rust readable form.
+
 pub mod parser;
 pub mod parser_models;

diff --git a/src/config/parser.rs b/src/config/parser.rs
index 4639013..ca53f1b 100644
--- a/src/config/parser.rs
+++ b/src/config/parser.rs
@@ -9,42 +9,36 @@ use rlua::Lua;
 use std::{collections::HashMap, fs, thread::available_parallelism};
 
 /// A named struct which stores the parsed config file options.
-///
-/// # Fields
-//
-/// * `port` - It stores the parsed port number option on which the server should launch.
-/// * `binding_ip` - It stores the parsed ip address option on which the server should launch
-/// * `style` - It stores the theming options for the website.
-/// * `redis_url` - It stores the redis connection url address on which the redis
-/// client should connect.
-/// * `aggregator` - It stores the option to whether enable or disable production use.
-/// * `logging` - It stores the option to whether enable or disable logs.
-/// * `debug` - It stores the option to whether enable or disable debug mode.
-/// * `upstream_search_engines` - It stores all the engine names that were enabled by the user.
-/// * `request_timeout` - It stores the time (secs) which controls the server request timeout.
-/// * `threads` - It stores the number of threads which controls the app will use to run.
 #[derive(Clone)]
 pub struct Config {
+    /// It stores the parsed port number option on which the server should launch.
     pub port: u16,
+    /// It stores the parsed ip address option on which the server should launch.
     pub binding_ip: String,
+    /// It stores the theming options for the website.
     pub style: Style,
+    /// It stores the redis connection url address on which the redis
+    /// client should connect.
     pub redis_url: String,
+    /// It stores the option to whether enable or disable production use.
     pub aggregator: AggregatorConfig,
+    /// It stores the option to whether enable or disable logs.
     pub logging: bool,
+    /// It stores the option to whether enable or disable debug mode.
     pub debug: bool,
+    /// It stores all the engine names that were enabled by the user.
     pub upstream_search_engines: Vec<String>,
+    /// It stores the time (secs) which controls the server request timeout.
     pub request_timeout: u8,
+    /// It stores the number of threads which the app will use to run.
     pub threads: u8,
 }
 
 /// Configuration options for the aggregator.
-///
-/// # Fields
-///
-/// * `random_delay` - It stores the option to whether enable or disable random delays between
-/// requests.
 #[derive(Clone)]
 pub struct AggregatorConfig {
+    /// It stores the option to whether enable or disable random delays between
+    /// requests.
     pub random_delay: bool,
 }
 
@@ -115,6 +109,11 @@ impl Config {
 }
 
 /// a helper function that sets the proper logging level
+///
+/// # Arguments
+///
+/// * `debug` - It takes the option to whether enable or disable debug mode.
+/// * `logging` - It takes the option to whether enable or disable logs.
 fn set_logging_level(debug: bool, logging: bool) {
     if let Ok(pkg_env_var) = std::env::var("PKG_ENV") {
         if pkg_env_var.to_lowercase() == "dev" {

diff --git a/src/config/parser_models.rs b/src/config/parser_models.rs
index 0bc52d8..aa0b86f 100644
--- a/src/config/parser_models.rs
+++ b/src/config/parser_models.rs
@@ -12,15 +12,12 @@ use serde::{Deserialize, Serialize};
 /// order to allow the deserializing the json back to struct in aggregate function in
 /// aggregator.rs and create a new struct out of it and then serialize it back to json and pass
 /// it to the template files.
-///
-/// # Fields
-//
-/// * `theme` - It stores the parsed theme option used to set a theme for the website.
-/// * `colorscheme` - It stores the parsed colorscheme option used to set a colorscheme for the
-/// theme being used.
 #[derive(Serialize, Deserialize, Clone)]
 pub struct Style {
+    /// It stores the parsed theme option used to set a theme for the website.
     pub theme: String,
+    /// It stores the parsed colorscheme option used to set a colorscheme for the
+    /// theme being used.
     pub colorscheme: String,
 }

diff --git a/src/engines/duckduckgo.rs b/src/engines/duckduckgo.rs
index 11b7d86..0649385 100644
--- a/src/engines/duckduckgo.rs
+++ b/src/engines/duckduckgo.rs
@@ -19,25 +19,7 @@ pub struct DuckDuckGo;
 
 #[async_trait::async_trait]
 impl SearchEngine for DuckDuckGo {
-    /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
-    /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
-    /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
-    /// values are RawSearchResult struct and then returns it within a Result enum.
-    ///
-    /// # Arguments
-    ///
-    /// * `query` - Takes the user provided query to query to the upstream search engine with.
-    /// * `page` - Takes an u32 as an argument.
-    /// * `user_agent` - Takes a random user agent string as an argument.
-    /// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout.
-    ///
-    /// # Errors
-    ///
-    /// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
-    /// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
-    /// provide results for the requested search query and also returns error if the scraping selector
-    /// or HeaderMap fails to initialize.
-    async fn results(
+    async fn results(
         &self,
         query: String,
         page: u32,

diff --git a/src/engines/engine_models.rs b/src/engines/engine_models.rs
index d33d13c..2f28ee5 100644
--- a/src/engines/engine_models.rs
+++ b/src/engines/engine_models.rs
@@ -6,19 +6,18 @@ use error_stack::{IntoReport, Result, ResultExt};
 use std::{collections::HashMap, fmt, time::Duration};
 
 /// A custom error type used for handle engine associated errors.
-///
-/// This enum provides variants three different categories of errors:
-/// * `RequestError` - This variant handles all request related errors like forbidden, not found,
-/// etc.
-/// * `EmptyResultSet` - This variant handles the not results found error provide by the upstream
-/// search engines.
-/// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
-/// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
-/// all other errors occurring within the code handling the `upstream search engines`.
 #[derive(Debug)]
 pub enum EngineError {
+    /// This variant handles the no results found error provided by the upstream
+    /// search engines.
     EmptyResultSet,
+    /// This variant handles all request related errors like forbidden, not found,
+    /// etc.
     RequestError,
+    /// This variant handles all the errors which are unexpected or occur rarely
+    /// and are errors mostly related to failure in initialization of HeaderMap,
+    /// Selector errors and all other errors occurring within the code handling
+    /// the `upstream search engines`.
     UnexpectedError,
 }
 
@@ -46,6 +45,23 @@ impl error_stack::Context for EngineError {}
 
 /// A trait to define common behavior for all search engines.
 #[async_trait::async_trait]
 pub trait SearchEngine: Sync + Send {
+    /// This helper function fetches/requests the search results from the upstream search engine in
+    /// an html form.
+    ///
+    /// # Arguments
+    ///
+    /// * `url` - It takes the url of the upstream search engine with the user requested search
+    /// query appended in the search parameters.
+    /// * `header_map` - It takes the http request headers to be sent to the upstream engine in
+    /// order to prevent being detected as a bot. It takes the header as a HeaderMap type.
+    /// * `request_timeout` - It takes the request timeout value as seconds which is used to limit
+    /// the amount of time for each request to remain connected until the results can be provided
+    /// by the upstream engine.
+    ///
+    /// # Error
+    ///
+    /// It returns the html data as a string if the upstream engine provides the data as expected
+    /// otherwise it returns a custom `EngineError`.
     async fn fetch_html_from_upstream(
         &self,
         url: String,
@@ -67,6 +83,24 @@ pub trait SearchEngine: Sync + Send {
             .change_context(EngineError::RequestError)?)
     }
 
+    /// This function scrapes results from the upstream engine and puts all the scraped results like
+    /// title, visiting_url (href in html), engine (from which engine it was fetched from) and description
+    /// in a RawSearchResult and then adds that to a HashMap whose keys are url and values are RawSearchResult
+    /// struct and then returns it within a Result enum.
+    ///
+    /// # Arguments
+    ///
+    /// * `query` - Takes the user provided query to query the upstream search engine with.
+    /// * `page` - Takes an u32 as an argument.
+    /// * `user_agent` - Takes a random user agent string as an argument.
+    /// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout.
+    ///
+    /// # Errors
+    ///
+    /// Returns an `EngineErrorKind` if the user is not connected to the internet or if there is a failure to
+    /// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+    /// provide results for the requested search query and also returns an error if the scraping selector
+    /// or HeaderMap fails to initialize.
     async fn results(
         &self,
         query: String,
@@ -76,8 +110,12 @@ pub trait SearchEngine: Sync + Send {
     ) -> Result<HashMap<String, RawSearchResult>, EngineError>;
 }
 
+/// A named struct which stores the engine struct with the name of the associated engine.
 pub struct EngineHandler {
+    /// It stores the engine struct wrapped in a box smart pointer as the engine struct implements
+    /// the `SearchEngine` trait.
     engine: Box<dyn SearchEngine>,
+    /// It stores the name of the engine to which the struct is associated to.
     name: &'static str,
 }
 
@@ -88,7 +126,15 @@ impl Clone for EngineHandler {
 }
 
 impl EngineHandler {
-    /// parses an engine name into an engine handler, returns none if the engine is unknown
+    /// Parses an engine name into an engine handler.
+    ///
+    /// # Arguments
+    ///
+    /// * `engine_name` - It takes the name of the engine to which the struct was associated to.
+    ///
+    /// # Returns
+    ///
+    /// It returns an option containing the engine handler or a none if the engine is unknown.
     pub fn new(engine_name: &str) -> Option<Self> {
         let engine: (&'static str, Box<dyn SearchEngine>) =
             match engine_name.to_lowercase().as_str() {
@@ -103,6 +149,8 @@ impl EngineHandler {
         })
     }
 
+    /// This function converts the EngineHandler type into a tuple containing the engine name and
+    /// the associated engine struct.
     pub fn into_name_engine(self) -> (&'static str, Box<dyn SearchEngine>) {
         (self.name, self.engine)
     }

diff --git a/src/engines/mod.rs b/src/engines/mod.rs
index f9bb8ad..8267c93 100644
--- a/src/engines/mod.rs
+++ b/src/engines/mod.rs
@@ -1,3 +1,8 @@
+//! This module provides different modules which handle the functionality to fetch results from the
+//! upstream search engines based on user requested queries. Also provides different models to
+//! provide standard functions to be implemented for all the upstream search engine handling
+//! code. Moreover, it also provides a custom error for the upstream search engine handling code.
+
 pub mod duckduckgo;
 pub mod engine_models;
 pub mod searx;

diff --git a/src/engines/searx.rs b/src/engines/searx.rs
index 4ad41f5..3f261ad 100644
--- a/src/engines/searx.rs
+++ b/src/engines/searx.rs
@@ -17,25 +17,6 @@ pub struct Searx;
 
 #[async_trait::async_trait]
 impl SearchEngine for Searx {
-    /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
-    /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
-    /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
-    /// values are RawSearchResult struct and then returns it within a Result enum.
-    ///
-    /// # Arguments
-    ///
-    /// * `query` - Takes the user provided query to query to the upstream search engine with.
-    /// * `page` - Takes an u32 as an argument.
-    /// * `user_agent` - Takes a random user agent string as an argument.
-    /// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout.
-    ///
-    /// # Errors
-    ///
-    /// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
-    /// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
-    /// provide results for the requested search query and also returns error if the scraping selector
-    /// or HeaderMap fails to initialize.
-
     async fn results(
         &self,
         query: String,

diff --git a/src/handler/mod.rs b/src/handler/mod.rs
index 8118b29..188767d 100644
--- a/src/handler/mod.rs
+++ b/src/handler/mod.rs
@@ -1 +1,5 @@
+//! This module provides modules which provide the functionality to handle paths for different
+//! files present on different paths and provide one appropriate path on which it is present and
+//! can be used.
+
 pub mod paths;

diff --git a/src/handler/paths.rs b/src/handler/paths.rs
index 9b4fa07..44228d8 100644
--- a/src/handler/paths.rs
+++ b/src/handler/paths.rs
@@ -6,20 +6,31 @@ use std::io::Error;
 use std::path::Path;
 
 // ------- Constants --------
-static PUBLIC_DIRECTORY_NAME: &str = "public";
-static COMMON_DIRECTORY_NAME: &str = "websurfx";
-static CONFIG_FILE_NAME: &str = "config.lua";
-static ALLOWLIST_FILE_NAME: &str = "allowlist.txt";
-static BLOCKLIST_FILE_NAME: &str = "blocklist.txt";
+/// The constant holding the name of the theme folder.
+const PUBLIC_DIRECTORY_NAME: &str = "public";
+/// The constant holding the name of the common folder.
+const COMMON_DIRECTORY_NAME: &str = "websurfx";
+/// The constant holding the name of the config file.
+const CONFIG_FILE_NAME: &str = "config.lua";
+/// The constant holding the name of the AllowList text file.
+const ALLOWLIST_FILE_NAME: &str = "allowlist.txt";
+/// The constant holding the name of the BlockList text file.
+const BLOCKLIST_FILE_NAME: &str = "blocklist.txt";
 
+/// An enum type which provides different variants to handle paths for various files/folders.
 #[derive(Hash, PartialEq, Eq, Debug)]
 pub enum FileType {
+    /// This variant handles all the paths associated with the config file.
     Config,
+    /// This variant handles all the paths associated with the Allowlist text file.
     AllowList,
+    /// This variant handles all the paths associated with the BlockList text file.
     BlockList,
+    /// This variant handles all the paths associated with the public folder (Theme folder).
     Theme,
 }
 
+/// A static variable which stores the different filesystem paths for various file/folder types.
 static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, Vec<String>>> =
     once_cell::sync::Lazy::new(|| {
         HashMap::from([
@@ -72,26 +83,19 @@ static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, Vec<String>>>
+/// It returns a `folder/file not found!!` error if the given file_type folder/file is not
+/// present on the path on which it is being tested.
 ///
-/// # Error
+/// # Example
+///
+/// If this function is given the file_type of Theme variant then the theme folder is checked by the
+/// following steps:
 ///
-/// Returns a `Theme (public) folder not found!!` error if the theme folder is not present under following
-/// paths which are:
 /// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
 /// 2. Under project folder ( or codebase in other words) if it is not present
 /// here then it returns an error as mentioned above.
@@ -106,6 +110,6 @@ pub fn file_path(file_type: FileType) -> Result<String, Error> {
     // if no of the configs above exist, return error
     Err(Error::new(
         std::io::ErrorKind::NotFound,
-        format!("{:?} file not found!!", file_type),
+        format!("{:?} file/folder not found!!", file_type),
     ))
 }

diff --git a/src/results/aggregation_models.rs b/src/results/aggregation_models.rs
index e985765..76d896d 100644
--- a/src/results/aggregation_models.rs
+++ b/src/results/aggregation_models.rs
@@ -8,20 +8,17 @@ use crate::{config::parser_models::Style, engines::engine_models::EngineError};
 
 /// A named struct to store the raw scraped search results scraped search results from the
 /// upstream search engines before aggregating it.It derives the Clone trait which is needed
 /// to write idiomatic rust using `Iterators`.
-///
-/// # Fields
-///
-/// * `title` - The title of the search result.
-/// * `url` - The url which is accessed when clicked on it
-/// (href url in html in simple words).
-/// * `description` - The description of the search result.
-/// * `engine` - The names of the upstream engines from which this results were provided.
 #[derive(Clone, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
+    /// The title of the search result.
     pub title: String,
+    /// The url which is accessed when clicked on it (href url in html in simple words).
     pub url: String,
+    /// The description of the search result.
     pub description: String,
+    /// The names of the upstream engines from which these results were provided.
     pub engine: Vec<String>,
 }
 
@@ -63,15 +60,27 @@ impl SearchResult {
     }
 }
 
-///
+/// A named struct that stores the error info related to the upstream search engines.
 #[derive(Serialize, Deserialize)]
 pub struct EngineErrorInfo {
+    /// It stores the error type which occurred while fetching the result from a particular search
+    /// engine.
     pub error: String,
+    /// It stores the name of the engine that failed to provide the requested search results.
     pub engine: String,
+    /// It stores the name of the color to indicate how severe the particular error is (in
+    /// other words it indicates the severity of the error/issue).
     pub severity_color: String,
 }
 
 impl EngineErrorInfo {
+    /// Constructs a new `EngineErrorInfo` with the given arguments needed for the struct.
+    ///
+    /// # Arguments
+    ///
+    /// * `error` - It takes the error type which occurred while fetching the result from a particular
+    /// search engine.
+    /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
     pub fn new(error: &EngineError, engine: String) -> Self {
         Self {
             error: match error {
@@ -91,23 +100,18 @@ impl EngineErrorInfo {
 
 /// A named struct to store, serialize, deserialize the all the search results scraped and
 /// aggregated from the upstream search engines.
-///
-/// # Fields
-///
-/// * `results` - Stores the individual serializable `SearchResult` struct into a vector of
-/// `SearchResult` structs.
-/// * `page_query` - Stores the current pages search query `q` provided in the search url.
-/// * `style` - Stores the theming options for the website.
-/// * `engine_errors_info` - Stores the information on which engines failed with their engine name
-/// and the type of error that caused it.
-/// * `empty_result_set` - Stores a boolean which indicates that no engines gave a result for the
-/// given search query.
 #[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResults {
+    /// Stores the individual serializable `SearchResult` struct into a vector of `SearchResult` structs.
     pub results: Vec<SearchResult>,
+    /// Stores the current pages search query `q` provided in the search url.
     pub page_query: String,
+    /// Stores the theming options for the website.
     pub style: Style,
+    /// Stores the information on which engines failed with their engine name
+    /// and the type of error that caused it.
     pub engine_errors_info: Vec<EngineErrorInfo>,
 }

diff --git a/src/results/mod.rs b/src/results/mod.rs
index 0c13442..b08eec0 100644
--- a/src/results/mod.rs
+++ b/src/results/mod.rs
@@ -1,3 +1,7 @@
+//! This module provides modules that handle the functionality to aggregate the fetched search
+//! results from the upstream search engines and filters it if safe search is set to 3 or 4. Also,
+//! provides various models to aggregate search results into a standardized form.
+
 pub mod aggregation_models;
 pub mod aggregator;
 pub mod user_agent;

diff --git a/src/results/user_agent.rs b/src/results/user_agent.rs
index 13166bf..8946e84 100644
--- a/src/results/user_agent.rs
+++ b/src/results/user_agent.rs
@@ -2,6 +2,8 @@
 
 use fake_useragent::{Browsers, UserAgents, UserAgentsBuilder};
 
+/// A static variable which stores the initially built `UserAgents` struct, so that it can be reused
+/// again and again without the need of reinitializing the `UserAgents` struct.
 static USER_AGENTS: once_cell::sync::Lazy<UserAgents> = once_cell::sync::Lazy::new(|| {
     UserAgentsBuilder::new()
         .cache(false)

diff --git a/src/server/mod.rs b/src/server/mod.rs
index 6a664ab..f5d2ce9 100644
--- a/src/server/mod.rs
+++ b/src/server/mod.rs
@@ -1 +1,6 @@
+//! This module provides modules that handle the functionality of handling different routes/paths
+//! for the `websurfx` search engine website. Also it handles the parsing of search parameters in
+//! the search route. Also, caches the next, current and previous search results in the search
+//! routes with the help of the redis server.
+
 pub mod routes;

diff --git a/src/server/routes.rs b/src/server/routes.rs
index 8910f8f..818fac5 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -17,16 +17,13 @@ use serde::Deserialize;
 use tokio::join;
 
 /// A named struct which deserializes all the user provided search parameters and stores them.
-///
-/// # Fields
-///
-/// * `q` - It stores the search parameter option `q` (or query in simple words)
-/// of the search url.
-/// * `page` - It stores the search parameter `page` (or pageno in simple words)
-/// of the search url.
 #[derive(Deserialize)]
 struct SearchParams {
+    /// It stores the search parameter option `q` (or query in simple words)
+    /// of the search url.
     q: Option<String>,
+    /// It stores the search parameter `page` (or pageno in simple words)
+    /// of the search url.
     page: Option<u32>,
 }
 
@@ -54,17 +51,14 @@ pub async fn not_found(
 }
 
 /// A named struct which is used to deserialize the cookies fetched from the client side.
-///
-/// # Fields
-///
-/// * `theme` - It stores the theme name used in the website.
-/// * `colorscheme` - It stores the colorscheme name used for the website theme.
-/// * `engines` - It stores the user selected upstream search engines selected from the UI.
 #[allow(dead_code)]
 #[derive(Deserialize)]
 struct Cookie {
+    /// It stores the theme name used in the website.
     theme: String,
+    /// It stores the colorscheme name used for the website theme.
     colorscheme: String,
+    /// It stores the user selected upstream search engines selected from the UI.
     engines: Vec<String>,
 }
 
@@ -149,8 +143,21 @@ pub async fn search(
     }
 }
 
-/// Fetches the results for a query and page.
-/// First checks the redis cache, if that fails it gets proper results
+/// Fetches the results for a query and page. It first checks the redis cache, if that
+/// fails it gets proper results by requesting from the upstream search engines.
+///
+/// # Arguments
+///
+/// * `url` - It takes the url of the current page that requested the search results for a
+/// particular search query.
+/// * `config` - It takes a parsed config struct.
+/// * `query` - It takes the user provided search query.
+/// * `page` - It takes the page number as a u32 value.
+/// * `req` - It takes the `HttpRequest` struct as a value.
+///
+/// # Error
+///
+/// It returns the `SearchResults` struct if the search results could be successfully fetched from
+/// the cache or from the upstream search engines otherwise it returns an appropriate error.
async fn results( url: String, config: &Config, @@ -158,7 +165,7 @@ async fn results( page: u32, req: HttpRequest, ) -> Result> { - //Initialize redis cache connection struct + // Initialize redis cache connection struct let mut redis_cache = RedisCache::new(config.redis_url.clone())?; // fetch the cached results json. let cached_results_json = redis_cache.cached_json(&url); From 0672b55349fe6e2a65f0419993e309d50dd2351f Mon Sep 17 00:00:00 2001 From: neon_arch Date: Sun, 3 Sep 2023 19:25:33 +0300 Subject: [PATCH 3/6] =?UTF-8?q?=F0=9F=9A=80=20chore:=20bump=20the=20app=20?= =?UTF-8?q?version=20(#205)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cargo.lock | 74 +++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6bbaab6..67165f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -103,7 +103,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -216,7 +216,7 @@ dependencies = [ "actix-router", "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -302,7 +302,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -520,18 +520,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.1" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c8d502cbaec4595d2e7d5f61e318f05417bd2b66fdc3809498f0d3fdf0bea27" +checksum = "6a13b88d2c62ff462f88e4a121f17a82c1af05693a2f192b5c38d14de73c19f6" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.4.1" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5891c7bc0edb3e1c2204fc5e94009affabeb1821c9e5fdc3959536c5c0bb984d" +checksum = "2bb9faaa7c2ef94b2743a21f5a29e6f0010dff4caa69ac8e9d6cf8b6fa74da08" dependencies = [ "anstyle", "clap_lex", @@ -798,7 +798,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -1188,9 +1188,9 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "handlebars" -version = "4.3.7" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83c3372087601b532857d332f5957cbae686da52bb7810bf038c3e3c3cc2fa0d" +checksum = "c39b3bc2a8f715298032cf5087e58573809374b08160aa7d750582bdb82d2683" dependencies = [ "log", "pest", @@ -1631,9 +1631,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.6.2" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5486aed0026218e61b8a01d5fbd5a0a134649abb71a0e53b7bc088529dced86e" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" [[package]] name = "memoffset" @@ -1783,9 +1783,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" 
+checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "memchr", ] @@ -1825,7 +1825,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -1944,7 +1944,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -2046,7 +2046,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -2349,9 +2349,9 @@ dependencies = [ [[package]] name = "redis" -version = "0.23.2" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffd6543a7bc6428396845f6854ccf3d1ae8823816592e2cbe74f20f50f209d02" +checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" dependencies = [ "combine", "itoa 1.0.9", @@ -2379,9 +2379,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12de2eff854e5fa4b1295edd650e227e9d8fb0c9e90b12e7f36d6a6811791a29" +checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47" dependencies = [ "aho-corasick", "memchr", @@ -2391,9 +2391,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49530408a136e16e5b486e883fbb6ba058e8e4e8ae6621a77b048b314336e629" +checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" dependencies = [ "aho-corasick", "memchr", @@ -2527,9 +2527,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.10" +version = "0.38.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6248e1caa625eb708e266e06159f135e8c26f2bb7ceb72dc4b2766d0340964" +checksum = "c0c3dde1fc030af041adc40e79c0e7fbcf431dd24870053d187d7c66e4b87453" dependencies = [ "bitflags 2.4.0", "errno", @@ -2687,7 +2687,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -2926,9 +2926,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.29" +version = "2.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +checksum = "0ddc1f908d32ec46858c2d3b3daa00cc35bf4b6841ce4355c7bb3eedf2283a68" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", @@ -2982,22 +2982,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.47" +version = "1.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" +checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.47" +version = "1.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" +checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -3152,7 +3152,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = 
[ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", ] [[package]] @@ -3474,7 +3474,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", "wasm-bindgen-shared", ] @@ -3508,7 +3508,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.33", - "syn 2.0.29", + "syn 2.0.30", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3531,7 +3531,7 @@ dependencies = [ [[package]] name = "websurfx" -version = "0.18.6" +version = "0.20.1" dependencies = [ "actix-cors", "actix-files", diff --git a/Cargo.toml b/Cargo.toml index ed0e24b..cc7309a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "websurfx" -version = "0.18.6" +version = "0.20.1" edition = "2021" description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind." repository = "https://github.com/neon-mmd/websurfx" From 453dbdc47d623725017a684c19ff46b9855c8928 Mon Sep 17 00:00:00 2001 From: neon_arch Date: Sun, 3 Sep 2023 19:34:22 +0300 Subject: [PATCH 4/6] =?UTF-8?q?=F0=9F=A7=B9=20chore:=20make=20rustfmt=20ha?= =?UTF-8?q?ppy=20(#205)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/engines/duckduckgo.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/engines/duckduckgo.rs b/src/engines/duckduckgo.rs index 0649385..5b7a452 100644 --- a/src/engines/duckduckgo.rs +++ b/src/engines/duckduckgo.rs @@ -19,7 +19,7 @@ pub struct DuckDuckGo; #[async_trait::async_trait] impl SearchEngine for DuckDuckGo { - async fn results( + async fn results( &self, query: String, page: u32, From 1a222217c45db5e1248f6808bf04ba060bc7f06e Mon Sep 17 00:00:00 2001 From: neon_arch Date: Tue, 12 Sep 2023 17:59:33 +0300 Subject: [PATCH 5/6] =?UTF-8?q?=F0=9F=A7=B9=20chore:=20make=20github=20act?= =?UTF-8?q?ions=20happy=20(#205)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/cache/error.rs | 9 +++------ src/results/aggregation_models.rs | 13 ++++++------- src/server/routes.rs | 3 ++- 3 files changed, 11 insertions(+), 14 deletions(-) diff --git a/src/cache/error.rs b/src/cache/error.rs index efd87c9..8bdb977 100644 --- a/src/cache/error.rs +++ b/src/cache/error.rs @@ -5,15 +5,12 @@ use std::fmt; use redis::RedisError; /// A custom error type used for handling redis async pool associated errors. -/// -/// This enum provides variants three different categories of errors: -/// * `RedisError` - This variant handles all errors related to `RedisError`, -/// * `PoolExhaustionWithConnectionDropError` - This variant handles the error -/// which occurs when all the connections in the connection pool return a connection -/// dropped redis error. #[derive(Debug)] pub enum PoolError { + /// This variant handles all errors related to `RedisError`, RedisError(RedisError), + /// This variant handles the errors which occurs when all the connections + /// in the connection pool return a connection dropped redis error. 
     PoolExhaustionWithConnectionDropError,
 }

diff --git a/src/results/aggregation_models.rs b/src/results/aggregation_models.rs
index 495572a..30316e2 100644
--- a/src/results/aggregation_models.rs
+++ b/src/results/aggregation_models.rs
@@ -114,12 +114,12 @@ pub struct SearchResults {
     /// Stores the information on which engines failed with their engine name
     /// and the type of error that caused it.
     pub engine_errors_info: Vec<EngineErrorInfo>,
-    /// Stores the flag option which holds the check value that the following
-    /// search query was disallowed when the safe search level set to 4 and it
+    /// Stores the flag option which holds the check value that the following
+    /// search query was disallowed when the safe search level set to 4 and it
     /// was present in the `Blocklist` file.
     pub disallowed: bool,
-    /// Stores the flag option which holds the check value that the following
-    /// search query was filtered when the safe search level set to 3 and it
+    /// Stores the flag option which holds the check value that the following
+    /// search query was filtered when the safe search level set to 3 and it
     /// was present in the `Blocklist` file.
     pub filtered: bool,
 }
@@ -133,9 +133,8 @@ impl SearchResults {
     /// and stores it into a vector of `SearchResult` structs.
     /// * `page_query` - Takes an argument of current page`s search query `q` provided in
     /// the search url.
-    /// * `empty_result_set` - Takes a boolean which indicates that no engines gave a result for the
-    /// given search query.
-    /// * ``
+    /// * `engine_errors_info` - Takes an array of structs which contains information regarding
+    /// which engines failed with their names, reason and their severity color name.
     pub fn new(
         results: Vec<SearchResult>,
         page_query: &str,

diff --git a/src/server/routes.rs b/src/server/routes.rs
index 96ad737..29b36aa 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -230,7 +230,8 @@ async fn results(
     // UI and use that.
     let mut results: SearchResults = match req.cookie("appCookie") {
         Some(cookie_value) => {
-            let cookie_value: Cookie = serde_json::from_str(cookie_value.name_value().1)?;
+            let cookie_value: Cookie<'_> =
+                serde_json::from_str(cookie_value.name_value().1)?;
 
             let engines: Vec<EngineHandler> = cookie_value
                 .engines
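The `Cookie<'_>` annotation in the hunk above is the `rust_2018_idioms` group at work: its `elided_lifetimes_in_paths` lint asks for an explicit `'_` marker wherever a type with a lifetime parameter is named with the lifetime left out. A small sketch of the rule, using a hypothetical borrowed struct rather than the websurfx `Cookie`:

#![warn(rust_2018_idioms)]

/// A borrowed cookie value; the field is made up for illustration.
struct Cookie<'a> {
    /// The theme name, borrowed from the raw cookie string.
    theme: &'a str,
}

/// Writing the return type as plain `Cookie` would still compile, but
/// `elided_lifetimes_in_paths` flags it; `Cookie<'_>` spells the borrow out.
fn parse(raw: &str) -> Cookie<'_> {
    Cookie { theme: raw }
}

fn main() {
    let cookie = parse("simple");
    assert_eq!(cookie.theme, "simple");
}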
From c60fdb8366b6244d6c6cf8061618a9734ef8aa3f Mon Sep 17 00:00:00 2001
From: neon_arch
Date: Tue, 12 Sep 2023 18:53:32 +0300
Subject: [PATCH 6/6] =?UTF-8?q?=F0=9F=A7=B9=20chore:=20make=20github=20act?=
 =?UTF-8?q?ions=20happy=20(#205)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/config/parser.rs | 2 +-
 src/server/routes.rs | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/config/parser.rs b/src/config/parser.rs
index e3b3588..4490bc6 100644
--- a/src/config/parser.rs
+++ b/src/config/parser.rs
@@ -34,7 +34,7 @@ pub struct Config {
     pub threads: u8,
     /// It stores configuration options for the ratelimiting middleware.
     pub rate_limiter: RateLimiter,
-    /// It stores the level of safe search to be used for restricting content in the 
+    /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
 }

diff --git a/src/server/routes.rs b/src/server/routes.rs
index 29b36aa..57aa413 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -33,6 +33,8 @@ struct SearchParams {
     /// It stores the search parameter `page` (or pageno in simple words)
     /// of the search url.
     page: Option<u32>,
+    /// It stores the search parameter `safesearch` (or safe search level in simple words) of the
+    /// search url.
     safesearch: Option<u8>,
 }
 
@@ -275,6 +277,8 @@ async fn results(
     }
 }
 
+/// A helper function which checks whether the search query contains any keywords which should be
+/// disallowed/allowed based on the regex based rules present in the blocklist and allowlist files.
 fn is_match_from_filter_list(
     file_path: &str,
     query: &str,
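The series ends by documenting `is_match_from_filter_list`, whose body falls outside the hunk above. For context, a self-contained sketch of what such a helper can look like, reading one regex rule per line of the filter file and testing the query against each; the signature, error type, and file handling here are assumptions for illustration, not the patched websurfx code:

use std::fs::File;
use std::io::{BufRead, BufReader};

use regex::Regex;

/// Returns true when any line of the filter file, read as a regex rule,
/// matches the given query. A sketch only; the real helper may differ.
fn is_match_from_filter_list(
    file_path: &str,
    query: &str,
) -> Result<bool, Box<dyn std::error::Error>> {
    let reader = BufReader::new(File::open(file_path)?);
    for line in reader.lines() {
        // Each line of blocklist.txt/allowlist.txt is treated as a regex rule.
        let re = Regex::new(&line?)?;
        if re.is_match(query) {
            return Ok(true);
        }
    }
    Ok(false)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // With a blocklist containing e.g. `^badword$`, a blocked query makes
    // the helper return true so the search route can bail out early.
    let blocked = is_match_from_filter_list("websurfx/blocklist.txt", "badword")?;
    println!("query blocked: {blocked}");
    Ok(())
}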