Mirror of https://github.com/neon-mmd/websurfx.git

Rename getter functions

Trickshotblaster 2023-07-04 15:11:30 -07:00
parent 2dbffcc6b7
commit 99936bcb1a
5 changed files with 11 additions and 11 deletions
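
The renames follow the common Rust getter convention (codified in the Rust API Guidelines) that a getter is named after the value it returns, without a `get_` prefix. A minimal sketch of the pattern, using a hypothetical struct that is not part of this codebase:

    struct Settings {
        port: u16,
    }

    impl Settings {
        // Idiomatic Rust getter: named after the field, no `get_` prefix.
        fn port(&self) -> u16 {
            self.port
        }
    }

    fn main() {
        let settings = Settings { port: 8080 };
        println!("listening on port {}", settings.port());
    }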

src/cache/cacher.rs

@@ -41,7 +41,7 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub fn get_cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+    pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
         let hashed_url_string = Self::hash_url(url);
         Ok(self.connection.get(hashed_url_string)?)
     }
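
For illustration, a crate-internal caller of the renamed method could look like the sketch below. `RedisCache::new` taking a Redis URL string is taken from the routes diff further down; the wrapper function itself is assumed, not part of this commit.

    use crate::cache::cacher::RedisCache;

    // Sketch only: look up a previously cached result for `url`,
    // returning the raw JSON string on a cache hit.
    fn lookup_cached(url: &str, redis_url: String) -> Result<String, Box<dyn std::error::Error>> {
        let mut redis_cache = RedisCache::new(redis_url)?;
        // Formerly `get_cached_json`.
        redis_cache.cached_json(url)
    }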

src/config/parser.rs

@ -50,7 +50,7 @@ impl Config {
let globals = context.globals(); let globals = context.globals();
context context
.load(&fs::read_to_string(Config::get_config_path()?)?) .load(&fs::read_to_string(Config::config_path()?)?)
.exec()?; .exec()?;
Ok(Config { Ok(Config {
@ -81,7 +81,7 @@ impl Config {
/// one (3). /// one (3).
/// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
/// here then it returns an error as mentioned above. /// here then it returns an error as mentioned above.
fn get_config_path() -> Result<String, Box<dyn std::error::Error>> { fn config_path() -> Result<String, Box<dyn std::error::Error>> {
// check user config // check user config
let path = format!( let path = format!(
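
The surrounding doc comment describes a lookup order for the configuration file that ends with `websurfx/` under the project folder. A rough sketch of that kind of first-existing-path fallback; the candidate paths below are illustrative, not the exact ones `config_path` checks:

    use std::path::Path;

    // Return the first candidate path that exists, or an error if none do.
    fn first_existing(candidates: &[&str]) -> Result<String, Box<dyn std::error::Error>> {
        for &candidate in candidates {
            if Path::new(candidate).exists() {
                return Ok(candidate.to_string());
            }
        }
        Err("config file not found in any of the expected locations".into())
    }

    fn main() {
        // Illustrative candidates only.
        let path = first_existing(&["./websurfx/config.lua", "/etc/xdg/websurfx/config.lua"]);
        println!("{:?}", path);
    }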

src/handler/public_paths.rs

@@ -17,7 +17,7 @@ static PUBLIC_DIRECTORY_NAME: &str = "public";
 /// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
 /// 2. Under project folder ( or codebase in other words) if it is not present
 /// here then it returns an error as mentioned above.
-pub fn get_public_path() -> Result<String, Error> {
+pub fn public_path() -> Result<String, Error> {
     if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
         return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
     }
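
The visible half of the renamed `public_path` checks `/opt/websurfx/public` first; the hunk cuts off before the fallback. A hedged sketch of the function's overall shape, assuming `Error` is `std::io::Error` and that the fallback is the `public/` directory under the project folder (neither detail is shown in the diff):

    use std::io::{Error, ErrorKind};
    use std::path::Path;

    static PUBLIC_DIRECTORY_NAME: &str = "public";

    // Sketch only: the second branch and the error message are assumptions.
    pub fn public_path() -> Result<String, Error> {
        if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
            return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
        }
        if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
            return Ok(format!("./{}", PUBLIC_DIRECTORY_NAME));
        }
        Err(Error::new(ErrorKind::NotFound, "public folder not found"))
    }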

src/lib.rs

@ -16,7 +16,7 @@ use actix_files as fs;
use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer}; use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
use config::parser::Config; use config::parser::Config;
use handlebars::Handlebars; use handlebars::Handlebars;
use handler::public_paths::get_public_path; use handler::public_paths::public_path;
/// Runs the web server on the provided TCP listener and returns a `Server` instance. /// Runs the web server on the provided TCP listener and returns a `Server` instance.
/// ///
@ -41,7 +41,7 @@ use handler::public_paths::get_public_path;
pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> { pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
let mut handlebars: Handlebars = Handlebars::new(); let mut handlebars: Handlebars = Handlebars::new();
let public_folder_path: String = get_public_path()?; let public_folder_path: String = public_path()?;
handlebars handlebars
.register_templates_directory(".html", format!("{}/templates", public_folder_path)) .register_templates_directory(".html", format!("{}/templates", public_folder_path))
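
For context, a caller of `run` (for example the crate's binary entry point) might look roughly like the sketch below. It assumes the library crate is named `websurfx` and re-exports `run` from its root, that a `Config::new()` constructor exists, and that `#[actix_web::main]` drives the runtime; only `config.binding_ip` and `config.port` are taken from the routes diff further down.

    use std::net::TcpListener;

    use websurfx::{config::parser::Config, run};

    // Sketch only: how the renamed helpers end up being exercised at startup.
    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        // Assumed constructor; the diff only shows part of the config loader.
        let config = Config::new().expect("failed to read the configuration file");
        let listener = TcpListener::bind(format!("{}:{}", config.binding_ip, config.port))?;
        run(listener, config)?.await
    }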

src/server/routes.rs

@@ -7,7 +7,7 @@ use std::fs::read_to_string;
 use crate::{
     cache::cacher::RedisCache,
     config::parser::Config,
-    handler::public_paths::get_public_path,
+    handler::public_paths::public_path,
     results::{aggregation_models::SearchResults, aggregator::aggregate},
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
@@ -89,7 +89,7 @@ pub async fn search(
         "http://{}:{}/search?q={}&page={}",
         config.binding_ip, config.port, query, page
     );
-    let results_json = get_results(url, &config, query, page).await?;
+    let results_json = results(url, &config, query, page).await?;
     let page_content: String = hbs.render("search", &results_json)?;
     Ok(HttpResponse::Ok().body(page_content))
 }
@@ -101,7 +101,7 @@ pub async fn search(

 /// Fetches the results for a query and page.
 /// First checks the redis cache, if that fails it gets proper results
-async fn get_results(
+async fn results(
     url: String,
     config: &Config,
     query: &str,
@@ -110,7 +110,7 @@ async fn get_results(
     //Initialize redis cache connection struct
     let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
     // fetch the cached results json.
-    let cached_results_json = redis_cache.get_cached_json(&url);
+    let cached_results_json = redis_cache.cached_json(&url);
     // check if fetched results was indeed fetched or it was an error and if so
     // handle the data accordingly.
     match cached_results_json {
@@ -128,7 +128,7 @@ async fn get_results(
 /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
 #[get("/robots.txt")]
 pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = read_to_string(format!("{}/robots.txt", get_public_path()?))?;
+    let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
     Ok(HttpResponse::Ok()
         .content_type("text/plain; charset=ascii")
         .body(page_content))
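
The renamed `results` helper implements a cache-or-fetch flow: try `cached_json` first and, on a miss, aggregate fresh results. The match arms are cut off in the hunk above, so the sketch below fills them in with assumptions; in particular the `aggregate` call signature, the `page` parameter type, the JSON deserialization, and the omitted step that writes fresh results back to the cache are guesses.

    use crate::{
        cache::cacher::RedisCache,
        config::parser::Config,
        results::{aggregation_models::SearchResults, aggregator::aggregate},
    };

    // Sketch only: the rough shape of the renamed `results` helper.
    async fn results_sketch(
        url: String,
        config: &Config,
        query: &str,
        page: u32, // parameter type assumed
    ) -> Result<SearchResults, Box<dyn std::error::Error>> {
        let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
        match redis_cache.cached_json(&url) {
            // Cache hit: reuse the stored JSON (assumes SearchResults: Deserialize).
            Ok(json) => Ok(serde_json::from_str::<SearchResults>(&json)?),
            // Cache miss: fetch fresh results (signature assumed); the real code
            // presumably also caches them before returning.
            Err(_) => Ok(aggregate(query, page).await?),
        }
    }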