diff --git a/.gitpod.yml b/.gitpod.yml index 9913c27..4eeabff 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,22 +1,23 @@ --- -image: ubuntu:latest +image: gitpod/workspace-base # Commands that will run on workspace start tasks: - name: Setup, Install & Build - - before: apt install cargo redis-server nodejs npm && cargo test - - init: cargo install cargo-watch - - command: redis-server --port 8080 & cargo watch -q -w "." -x "run" + before: apt install cargo redis-server nodejs npm -y && cargo test + init: cargo install cargo-watch + command: redis-server --port 8080 & cargo watch -q -w "." -x "run" # Ports to expose on workspace startup ports: - name: Website description: Website Preview port: 8080 - onOpen: open-browser + onOpen: open-preview # vscode IDE setup vscode: extensions: - vadimcn.vscode-lldb - - rust-lang.rust-analyzer + - cschleiden.vscode-github-actions + - rust-lang.rust - bungcip.better-toml - serayuzgur.crates - usernamehw.errorlens @@ -27,10 +28,8 @@ vscode: - evgeniypeshkov.syntax-highlighter - redhat.vscode-yaml - ms-azuretools.vscode-docker - - GitHub.vscode-github-actions - Catppuccin.catppuccin-vsc - PKief.material-icon-theme - - tal7aouy.rainbow-bracket - oderwat.indent-rainbow - formulahendry.auto-rename-tag - eamodio.gitlens diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs index 31494c4..44d0710 100644 --- a/src/cache/cacher.rs +++ b/src/cache/cacher.rs @@ -41,7 +41,7 @@ impl RedisCache { /// # Arguments /// /// * `url` - It takes an url as a string. - pub fn get_cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> { + pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> { let hashed_url_string = Self::hash_url(url); Ok(self.connection.get(hashed_url_string)?) } diff --git a/src/config/parser.rs b/src/config/parser.rs index f760e7d..e411732 100644 --- a/src/config/parser.rs +++ b/src/config/parser.rs @@ -50,7 +50,7 @@ impl Config { let globals = context.globals(); context - .load(&fs::read_to_string(Config::get_config_path()?)?) 
+ .load(&fs::read_to_string(Config::config_path()?)?) .exec()?; Ok(Config { @@ -81,7 +81,7 @@ impl Config { /// one (3). /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present /// here then it returns an error as mentioned above. - fn get_config_path() -> Result<String, Box<dyn std::error::Error>> { + fn config_path() -> Result<String, Box<dyn std::error::Error>> { // check user config let path = format!( diff --git a/src/handler/public_paths.rs b/src/handler/public_paths.rs index 7f0924b..18d51e4 100644 --- a/src/handler/public_paths.rs +++ b/src/handler/public_paths.rs @@ -17,7 +17,7 @@ static PUBLIC_DIRECTORY_NAME: &str = "public"; /// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2) /// 2. Under project folder ( or codebase in other words) if it is not present /// here then it returns an error as mentioned above. -pub fn get_public_path() -> Result<String, Error> { +pub fn public_path() -> Result<String, Error> { if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() { return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME)); } diff --git a/src/lib.rs b/src/lib.rs index 30ebe797..e226e14 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -16,7 +16,7 @@ use actix_files as fs; use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer}; use config::parser::Config; use handlebars::Handlebars; -use handler::public_paths::get_public_path; +use handler::public_paths::public_path; /// Runs the web server on the provided TCP listener and returns a `Server` instance. 
/// @@ -41,7 +41,7 @@ use handler::public_paths::get_public_path; pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> { let mut handlebars: Handlebars = Handlebars::new(); - let public_folder_path: String = get_public_path()?; + let public_folder_path: String = public_path()?; handlebars .register_templates_directory(".html", format!("{}/templates", public_folder_path)) diff --git a/src/server/routes.rs b/src/server/routes.rs index cecdcdd..e33848d 100644 --- a/src/server/routes.rs +++ b/src/server/routes.rs @@ -7,7 +7,7 @@ use std::fs::read_to_string; use crate::{ cache::cacher::RedisCache, config::parser::Config, - handler::public_paths::get_public_path, + handler::public_paths::public_path, results::{aggregation_models::SearchResults, aggregator::aggregate}, }; use actix_web::{get, web, HttpRequest, HttpResponse}; @@ -89,7 +89,7 @@ pub async fn search( "http://{}:{}/search?q={}&page={}", config.binding_ip, config.port, query, page ); - let results_json = get_results(url, &config, query, page).await?; + let results_json = results(url, &config, query, page).await?; let page_content: String = hbs.render("search", &results_json)?; Ok(HttpResponse::Ok().body(page_content)) } @@ -101,7 +101,7 @@ pub async fn search( /// Fetches the results for a query and page. /// First checks the redis cache, if that fails it gets proper results -async fn get_results( +async fn results( url: String, config: &Config, query: &str, @@ -110,7 +110,7 @@ async fn get_results( //Initialize redis cache connection struct let mut redis_cache = RedisCache::new(config.redis_url.clone())?; // fetch the cached results json. - let cached_results_json = redis_cache.get_cached_json(&url); + let cached_results_json = redis_cache.cached_json(&url); // check if fetched results was indeed fetched or it was an error and if so // handle the data accordingly. 
match cached_results_json { @@ -128,7 +128,7 @@ async fn get_results( /// Handles the route of robots.txt page of the `websurfx` meta search engine website. #[get("/robots.txt")] pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> { - let page_content: String = read_to_string(format!("{}/robots.txt", get_public_path()?))?; + let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?; Ok(HttpResponse::Ok() .content_type("text/plain; charset=ascii") .body(page_content))