diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 324dfbf..41ff84e 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,7 +14,7 @@ Know how to fix or improve a github action?. Consider Submitting a Pull request
 
 ## Source Code
 
-You should know atleast one of the things below to start contributing:
+You should know at least one of the things below to start contributing:
 
 - Rust basics
 - Actix-web crate basics
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
index a7ad130..28cb6b7 100644
--- a/PULL_REQUEST_TEMPLATE.md
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -16,7 +16,7 @@
 
 ## Author's checklist
 
-
+
 
 ## Related issues
 
diff --git a/README.md b/README.md
index 29f2cb2..e9c7e5c 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@
 - **Community**
   - [📊 System Requirements](#system-requirements-)
   - [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-)
-  - [📣 More Contributers Wanted](#more-contributers-wanted-)
+  - [📣 More Contributors Wanted](#more-contributors-wanted-)
   - [💖 Supporting Websurfx](#supporting-websurfx-)
   - [📘 Documentation](#documentation-)
   - [🛣️ Roadmap](#roadmap-)
@@ -165,7 +165,7 @@ Websurfx is based on Rust due to its memory safety features, which prevents vuln
 
 **[⬆️ Back to Top](#--)**
 
-# More Contributers Wanted 📣
+# More Contributors Wanted 📣
 
 We are looking for more willing contributors to help grow this project. For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines and rules for making contributions.
 
diff --git a/public/static/pagination.js b/public/static/pagination.js
index 72ce320..92f9272 100644
--- a/public/static/pagination.js
+++ b/public/static/pagination.js
@@ -30,8 +30,8 @@ function navigate_backward() {
   let page = parseInt(searchParams.get('page'));
 
   if (isNaN(page)) {
-    page = 1;
-  } else if (page > 1) {
+    page = 0;
+  } else if (page > 0) {
     page--;
   }
 
diff --git a/src/config_parser/parser.rs b/src/config_parser/parser.rs
index 4c5c1e6..5b4c2d8 100644
--- a/src/config_parser/parser.rs
+++ b/src/config_parser/parser.rs
@@ -24,46 +24,35 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
-    pub aggregator: AggreatorConfig,
+    pub aggregator: AggregatorConfig,
     pub logging: bool,
     pub debug: bool,
 }
 
 /// Configuration options for the aggregator.
 #[derive(Clone)]
-pub struct AggreatorConfig {
+pub struct AggregatorConfig {
     /// Whether to introduce a random delay before sending the request to the search engine.
     pub random_delay: bool,
 }
 
 impl Config {
     /// A function which parses the config.lua file and puts all the parsed options in the newly
-    /// contructed Config struct and returns it.
+    /// constructed Config struct and returns it.
     ///
     /// # Error
     ///
     /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error
-    /// or io error if the config.lua file doesn't exists otherwise it returns a newly contructed
+    /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
     /// Config struct with all the parsed config options from the parsed config file.
     pub fn parse() -> Result<Self, Box<dyn std::error::Error>> {
         Lua::new().context(|context| -> Result<Self, Box<dyn std::error::Error>> {
             let globals = context.globals();
 
             context
-                .load(&fs::read_to_string(
-                    Config::handle_different_config_file_path()?,
-                )?)
+                .load(&fs::read_to_string(Config::get_config_path()?)?)
                 .exec()?;
 
-            let production_use = globals.get::<_, bool>("production_use")?;
-            let aggregator_config = if production_use {
-                AggreatorConfig { random_delay: true }
-            } else {
-                AggreatorConfig {
-                    random_delay: false,
-                }
-            };
-
             Ok(Config {
                 port: globals.get::<_, u16>("port")?,
                 binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -72,7 +61,9 @@ impl Config {
                     globals.get::<_, String>("colorscheme")?,
                 ),
                 redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
-                aggregator: aggregator_config,
+                aggregator: AggregatorConfig {
+                    random_delay: globals.get::<_, bool>("production_use")?,
+                },
                 logging: globals.get::<_, bool>("logging")?,
                 debug: globals.get::<_, bool>("debug")?,
             })
@@ -90,35 +81,37 @@ impl Config {
     /// one (3).
     /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
     /// here then it returns an error as mentioned above.
-    fn handle_different_config_file_path() -> Result<String, Box<dyn std::error::Error>> {
-        if Path::new(
-            format!(
-                "{}/.config/{}/config.lua",
-                std::env::var("HOME").unwrap(),
-                COMMON_DIRECTORY_NAME
-            )
-            .as_str(),
-        )
-        .exists()
-        {
-            Ok(format!(
+    fn get_config_path() -> Result<String, Box<dyn std::error::Error>> {
+        // check user config
+
+        let path = format!(
+            "{}/.config/{}/config.lua",
+            std::env::var("HOME").unwrap(),
+            COMMON_DIRECTORY_NAME
+        );
+        if Path::new(path.as_str()).exists() {
+            return Ok(format!(
                 "{}/.config/{}/{}",
                 std::env::var("HOME").unwrap(),
                 COMMON_DIRECTORY_NAME,
                 CONFIG_FILE_NAME
-            ))
-        } else if Path::new(
-            format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str(),
-        )
-        .exists()
-        {
-            Ok("/etc/xdg/websurfx/config.lua".to_string())
-        } else if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
+            ));
+        }
+
+        // look for config in /etc/xdg
+        if Path::new(format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
             .exists()
         {
-            Ok("./websurfx/config.lua".to_string())
-        } else {
-            Err("Config file not found!!".to_string().into())
+            return Ok("/etc/xdg/websurfx/config.lua".to_string());
         }
+
+        // use dev config
+        if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str()).exists()
+        {
+            return Ok("./websurfx/config.lua".to_string());
+        }
+
+        // if no of the configs above exist, return error
+        Err("Config file not found!!".to_string().into())
     }
 }
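For reference, the renamed `get_config_path` checks three locations in a fixed priority order and returns the first one that exists; keeping the early `return`s avoids the deeply nested `if / else if` chain of the old version. A minimal, standalone sketch of the same lookup order (the `resolve_config_path` helper below is illustrative only, not part of the codebase):

```rust
use std::path::PathBuf;

/// Illustrative only: return the first existing config file, searched in the
/// same priority order as `Config::get_config_path`:
/// 1. ~/.config/websurfx/config.lua
/// 2. /etc/xdg/websurfx/config.lua
/// 3. ./websurfx/config.lua
fn resolve_config_path() -> Option<PathBuf> {
    let home = std::env::var("HOME").unwrap_or_default();
    let candidates = [
        PathBuf::from(format!("{home}/.config/websurfx/config.lua")),
        PathBuf::from("/etc/xdg/websurfx/config.lua"),
        PathBuf::from("./websurfx/config.lua"),
    ];
    // The first candidate that exists on disk wins.
    candidates.into_iter().find(|path| path.exists())
}
```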
diff --git a/src/config_parser/parser_models.rs b/src/config_parser/parser_models.rs
index 42baf0d..0bc52d8 100644
--- a/src/config_parser/parser_models.rs
+++ b/src/config_parser/parser_models.rs
@@ -1,5 +1,5 @@
 //! This module provides public models for handling, storing and serializing parsed config file
-//! options from config.lua by grouping them togather.
+//! options from config.lua by grouping them together.
 
 use serde::{Deserialize, Serialize};
 
diff --git a/src/engines/duckduckgo.rs b/src/engines/duckduckgo.rs
index 70c3a87..44d7b0d 100644
--- a/src/engines/duckduckgo.rs
+++ b/src/engines/duckduckgo.rs
@@ -36,7 +36,7 @@ pub async fn results(
     user_agent: &str,
 ) -> Result<HashMap<String, RawSearchResult>, EngineError> {
     // Page number can be missing or empty string and so appropriate handling is required
-    // so that upstream server recieves valid page number.
+    // so that upstream server receives valid page number.
     let url: String = match page {
         1 => {
             format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js")
@@ -86,7 +86,7 @@ pub async fn results(
     let results: String = reqwest::Client::new()
         .get(url)
         .timeout(Duration::from_secs(5))
-        .headers(header_map) // add spoofed headers to emulate human behaviour
+        .headers(header_map) // add spoofed headers to emulate human behavior
         .send()
         .await
         .into_report()
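The request above follows the usual `reqwest` pattern of a per-request timeout plus custom headers. A minimal, self-contained sketch of that pattern (the placeholder user agent below stands in for the engine's full spoofed header set):

```rust
use reqwest::header::{HeaderMap, USER_AGENT};
use std::time::Duration;

async fn fetch_page(url: &str) -> Result<String, reqwest::Error> {
    let mut headers = HeaderMap::new();
    // Placeholder value; the engine builds a much fuller set of spoofed headers.
    headers.insert(USER_AGENT, "Mozilla/5.0".parse().unwrap());

    reqwest::Client::new()
        .get(url)
        .timeout(Duration::from_secs(5)) // give up quickly if the upstream engine hangs
        .headers(headers)
        .send()
        .await?
        .text()
        .await
}
```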
diff --git a/src/engines/engine_models.rs b/src/engines/engine_models.rs
index 7a58688..091c62d 100644
--- a/src/engines/engine_models.rs
+++ b/src/engines/engine_models.rs
@@ -13,7 +13,7 @@ use std::fmt;
 /// search engines.
 /// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
 /// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
-/// all other errors occuring within the code handling the `upstream search engines`.
+/// all other errors occurring within the code handling the `upstream search engines`.
 #[derive(Debug)]
 pub enum EngineError {
     EmptyResultSet,
diff --git a/src/search_results_handler/aggregator.rs b/src/search_results_handler/aggregator.rs
index cba266c..f5719b0 100644
--- a/src/search_results_handler/aggregator.rs
+++ b/src/search_results_handler/aggregator.rs
@@ -17,7 +17,7 @@ use crate::engines::{duckduckgo, searx};
 /// then removes duplicate results and if two results are found to be from two or more engines
 /// then puts their names together to show the results are fetched from these upstream engines
 /// and then removes all data from the HashMap and puts into a struct of all results aggregated
-/// into a vector and also adds the query used into the struct this is neccessory because
+/// into a vector and also adds the query used into the struct this is necessary because
 /// otherwise the search bar in search remains empty if searched from the query url
 ///
 /// # Example:
diff --git a/src/server/routes.rs b/src/server/routes.rs
index 9234d8d..60d3007 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -1,5 +1,5 @@
 //! This module provides the functionality to handle different routes of the `websurfx`
-//! meta search engine website and provide approriate response to each route/page
+//! meta search engine website and provide appropriate response to each route/page
 //! when requested.
 
 use std::fs::read_to_string;
@@ -82,40 +82,16 @@ pub async fn search(
             .insert_header(("location", "/"))
             .finish())
     } else {
-        let page_url: String; // Declare the page_url variable without initializing it
-
-        // ...
-
-        let page = match params.page {
-            Some(page_number) => {
-                if page_number <= 1 {
-                    page_url = format!(
-                        "http://{}:{}/search?q={}&page={}",
-                        config.binding_ip_addr, config.port, query, 1
-                    );
-                    1
-                } else {
-                    page_url = format!(
-                        "http://{}:{}/search?q={}&page={}",
-                        config.binding_ip_addr, config.port, query, page_number
-                    );
-
-                    page_number
-                }
-            }
-            None => {
-                page_url = format!(
-                    "http://{}:{}{}&page={}",
-                    config.binding_ip_addr,
-                    config.port,
-                    req.uri(),
-                    1
-                );
-
-                1
-            }
+        let page = match &params.page {
+            Some(page) => *page,
+            None => 0,
         };
+        let page_url = format!(
+            "http://{}:{}/search?q={}&page={}",
+            config.binding_ip_addr, config.port, query, page
+        );
+
         // fetch the cached results json.
         let cached_results_json = redis_cache.cached_results_json(&page_url);
         // check if fetched results was indeed fetched or it was an error and if so
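The `match` on `params.page` now simply defaults a missing page to 0, which keeps the server-side page numbering consistent with the zero-based arithmetic introduced in pagination.js above. For illustration, the same defaulting can be written with `unwrap_or`; the struct below only sketches the relevant field of the handler's query parameters and is not the project's actual type:

```rust
/// Sketch of the handler's query parameters; only the field used here is shown.
struct SearchParams {
    page: Option<u32>,
}

fn page_number(params: &SearchParams) -> u32 {
    // A missing `page` query parameter now means the first, zero-indexed page.
    params.page.unwrap_or(0)
}
```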