mirror of https://github.com/neon-mmd/websurfx.git synced 2024-12-22 20:38:22 -05:00

Merge pull request #133 from MilimTheTrueOne/rolling

Renaming and Refactoring
alamin655 2023-07-04 13:19:53 +05:30 committed by GitHub
commit 9c8b826398
17 changed files with 81 additions and 75 deletions


@@ -5,7 +5,7 @@
use std::net::TcpListener;
-use websurfx::{config_parser::parser::Config, run};
+use websurfx::{config::parser::Config, run};
/// The function that launches the main server and registers all the routes of the website.
///
@@ -26,7 +26,7 @@ async fn main() -> std::io::Result<()> {
log::info!("started server on port {}", config.port);
-let listener = TcpListener::bind((config.binding_ip_addr.clone(), config.port))?;
+let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?;
run(listener, config)?.await
}

src/cache/cacher.rs vendored

@@ -32,7 +32,7 @@ impl RedisCache {
/// # Arguments
///
/// * `url` - It takes an url as string.
-fn compute_url_hash(url: &str) -> String {
+fn hash_url(url: &str) -> String {
format!("{:?}", compute(url))
}
@@ -41,8 +41,8 @@ impl RedisCache {
/// # Arguments
///
/// * `url` - It takes an url as a string.
-pub fn cached_results_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
-let hashed_url_string = Self::compute_url_hash(url);
+pub fn get_cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+let hashed_url_string = Self::hash_url(url);
Ok(self.connection.get(hashed_url_string)?)
}
@@ -59,7 +59,7 @@ impl RedisCache {
json_results: String,
url: &str,
) -> Result<(), Box<dyn std::error::Error>> {
-let hashed_url_string = Self::compute_url_hash(url);
+let hashed_url_string = Self::hash_url(url);
// put results_json into cache
self.connection.set(&hashed_url_string, json_results)?;
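
For orientation, a minimal usage sketch of the renamed cache API. The method names (`get_cached_json`, `cache_results`, the private `hash_url`) come from this diff; the wrapper function and URLs are illustrative, and it is assumed that `RedisCache::new` accepts the owned connection-URL string it is passed in routes.rs:

    use websurfx::cache::cacher::RedisCache;

    fn demo_cache() -> Result<(), Box<dyn std::error::Error>> {
        // Connect using the (renamed) redis_url value from config.lua.
        let mut cache = RedisCache::new("redis://127.0.0.1:8082".to_string())?;
        let page_url = "http://127.0.0.1:8080/search?q=rust&page=0";
        // get_cached_json hashes the URL internally (via the private hash_url)
        // and returns the cached JSON string, erroring on a cache miss.
        match cache.get_cached_json(page_url) {
            Ok(json) => println!("cache hit: {}", json),
            // On a miss, results would be aggregated and written back.
            Err(_) => cache.cache_results(String::from("{\"results\":[]}"), page_url)?,
        }
        Ok(())
    }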


@@ -14,16 +14,16 @@ static CONFIG_FILE_NAME: &str = "config.lua";
/// # Fields
//
/// * `port` - It stores the parsed port number option on which the server should launch.
-/// * `binding_ip_addr` - It stores the parsed ip address option on which the server should launch
+/// * `binding_ip` - It stores the parsed ip address option on which the server should launch
/// * `style` - It stores the theming options for the website.
-/// * `redis_connection_url` - It stores the redis connection url address on which the redis
+/// * `redis_url` - It stores the redis connection url address on which the redis
/// client should connect.
#[derive(Clone)]
pub struct Config {
pub port: u16,
-pub binding_ip_addr: String,
+pub binding_ip: String,
pub style: Style,
-pub redis_connection_url: String,
+pub redis_url: String,
pub aggregator: AggregatorConfig,
pub logging: bool,
pub debug: bool,
@@ -55,12 +55,12 @@ impl Config {
Ok(Config {
port: globals.get::<_, u16>("port")?,
-binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
+binding_ip: globals.get::<_, String>("binding_ip")?,
style: Style::new(
globals.get::<_, String>("theme")?,
globals.get::<_, String>("colorscheme")?,
),
-redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+redis_url: globals.get::<_, String>("redis_url")?,
aggregator: AggregatorConfig {
random_delay: globals.get::<_, bool>("production_use")?,
},
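
A quick way to sanity-check the renamed fields is to parse the config and print them. A minimal sketch (`Config::parse` is taken from this diff; the surrounding `main` is illustrative and assumes a config.lua using the renamed keys shown at the bottom of this commit):

    use websurfx::config::parser::Config;

    fn main() {
        // parse() loads config.lua and reads the renamed keys
        // (binding_ip, redis_url) into the renamed struct fields.
        let config = Config::parse().expect("failed to parse config.lua");
        println!("binding_ip = {}", config.binding_ip);
        println!("redis_url  = {}", config.redis_url);
        println!("port       = {}", config.port);
    }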


@@ -7,7 +7,7 @@ use std::{collections::HashMap, time::Duration};
use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
use scraper::{Html, Selector};
-use crate::search_results_handler::aggregation_models::RawSearchResult;
+use crate::results::aggregation_models::RawSearchResult;
use super::engine_models::EngineError;


@@ -6,7 +6,7 @@ use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
use scraper::{Html, Selector};
use std::collections::HashMap;
-use crate::search_results_handler::aggregation_models::RawSearchResult;
+use crate::results::aggregation_models::RawSearchResult;
use super::engine_models::EngineError;
use error_stack::{IntoReport, Report, Result, ResultExt};


@@ -1 +1 @@
-pub mod public_path_handler;
+pub mod public_paths;


@@ -17,15 +17,17 @@ static PUBLIC_DIRECTORY_NAME: &str = "public";
/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
/// 2. Under project folder ( or codebase in other words) if it is not present
/// here then it returns an error as mentioned above.
-pub fn handle_different_public_path() -> Result<String, Error> {
+pub fn get_public_path() -> Result<String, Error> {
if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
-Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME))
-} else if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
-Ok(format!("./{}", PUBLIC_DIRECTORY_NAME))
-} else {
-Err(Error::new(
-std::io::ErrorKind::NotFound,
-"Themes (public) folder not found!!",
-))
-}
+return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
+}
+if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
+return Ok(format!("./{}", PUBLIC_DIRECTORY_NAME));
+}
+Err(Error::new(
+std::io::ErrorKind::NotFound,
+"Themes (public) folder not found!!",
+))
}
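
Call sites are unaffected by the early-return rewrite; a hedged usage sketch with the renamed module path (the surrounding `main` is illustrative only):

    use websurfx::handler::public_paths::get_public_path;

    fn main() -> std::io::Result<()> {
        // Checks /opt/websurfx/public first, then ./public, and returns
        // a NotFound error if neither directory exists.
        let public_folder = get_public_path()?;
        println!("serving static assets from {}", public_folder);
        Ok(())
    }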


@@ -2,10 +2,10 @@
//! and register all the routes for the `websurfx` meta search engine website.
pub mod cache;
-pub mod config_parser;
+pub mod config;
pub mod engines;
pub mod handler;
-pub mod search_results_handler;
+pub mod results;
pub mod server;
use std::net::TcpListener;
@@ -14,9 +14,9 @@ use crate::server::routes;
use actix_files as fs;
use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
-use config_parser::parser::Config;
+use config::parser::Config;
use handlebars::Handlebars;
-use handler::public_path_handler::handle_different_public_path;
+use handler::public_paths::get_public_path;
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
///
@@ -32,7 +32,7 @@ use handler::public_path_handler::handle_different_public_path
///
/// ```rust
/// use std::net::TcpListener;
-/// use websurfx::{config_parser::parser::Config, run};
+/// use websurfx::{config::parser::Config, run};
///
/// let config = Config::parse().unwrap();
/// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
@@ -41,7 +41,7 @@ use handler::public_path_handler::handle_different_public_path
pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
let mut handlebars: Handlebars = Handlebars::new();
-let public_folder_path: String = handle_different_public_path()?;
+let public_folder_path: String = get_public_path()?;
handlebars
.register_templates_directory(".html", format!("{}/templates", public_folder_path))
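
Taken together, the renames reduce to a handful of import-path changes for downstream code. A summary sketch, with the old paths kept as comments (all paths as they appear in this diff):

    // was: websurfx::config_parser::parser::Config
    use websurfx::config::parser::Config;
    // was: websurfx::handler::public_path_handler::handle_different_public_path
    use websurfx::handler::public_paths::get_public_path;
    // was: websurfx::search_results_handler::aggregation_models::SearchResults
    use websurfx::results::aggregation_models::SearchResults;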


@@ -3,7 +3,7 @@
use serde::{Deserialize, Serialize};
-use crate::config_parser::parser_models::Style;
+use crate::config::parser_models::Style;
/// A named struct to store, serialize and deserializes the individual search result from all the
/// scraped and aggregated search results from the upstream search engines.


@@ -6,9 +6,9 @@ use std::fs::read_to_string;
use crate::{
cache::cacher::RedisCache,
-config_parser::parser::Config,
-handler::public_path_handler::handle_different_public_path,
-search_results_handler::{aggregation_models::SearchResults, aggregator::aggregate},
+config::parser::Config,
+handler::public_paths::get_public_path,
+results::{aggregation_models::SearchResults, aggregator::aggregate},
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
@@ -73,46 +73,25 @@ pub async fn search(
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let params = web::Query::<SearchParams>::from_query(req.query_string())?;
-//Initialize redis cache connection struct
-let mut redis_cache = RedisCache::new(config.redis_connection_url.clone())?;
match &params.q {
Some(query) => {
if query.trim().is_empty() {
-Ok(HttpResponse::Found()
+return Ok(HttpResponse::Found()
.insert_header(("location", "/"))
-.finish())
-} else {
-let page = match &params.page {
-Some(page) => *page,
-None => 0,
-};
-let page_url = format!(
-"http://{}:{}/search?q={}&page={}",
-config.binding_ip_addr, config.port, query, page
-);
-// fetch the cached results json.
-let cached_results_json = redis_cache.cached_results_json(&page_url);
-// check if fetched results was indeed fetched or it was an error and if so
-// handle the data accordingly.
-match cached_results_json {
-Ok(results_json) => {
-let new_results_json: SearchResults = serde_json::from_str(&results_json)?;
-let page_content: String = hbs.render("search", &new_results_json)?;
-Ok(HttpResponse::Ok().body(page_content))
-}
-Err(_) => {
-let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
-aggregate(query, page, config.aggregator.random_delay, config.debug).await?;
-results_json.add_style(config.style.clone());
-redis_cache
-.cache_results(serde_json::to_string(&results_json)?, &page_url)?;
-let page_content: String = hbs.render("search", &results_json)?;
-Ok(HttpResponse::Ok().body(page_content))
-}
-}
-}
+.finish());
+}
+let page = match &params.page {
+Some(page) => *page,
+None => 0,
+};
+let url = format!(
+"http://{}:{}/search?q={}&page={}",
+config.binding_ip, config.port, query, page
+);
+let results_json = get_results(url, &config, query, page).await?;
+let page_content: String = hbs.render("search", &results_json)?;
+Ok(HttpResponse::Ok().body(page_content))
}
None => Ok(HttpResponse::Found()
.insert_header(("location", "/"))
@@ -120,11 +99,36 @@ pub async fn search(
}
}
+/// Fetches the results for a query and page.
+/// First checks the redis cache, if that fails it gets proper results
+async fn get_results(
+url: String,
+config: &Config,
+query: &str,
+page: u32,
+) -> Result<SearchResults, Box<dyn std::error::Error>> {
+//Initialize redis cache connection struct
+let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
+// fetch the cached results json.
+let cached_results_json = redis_cache.get_cached_json(&url);
+// check if fetched results was indeed fetched or it was an error and if so
+// handle the data accordingly.
+match cached_results_json {
+Ok(results_json) => Ok(serde_json::from_str::<SearchResults>(&results_json).unwrap()),
+Err(_) => {
+let mut results_json: crate::results::aggregation_models::SearchResults =
+aggregate(query, page, config.aggregator.random_delay, config.debug).await?;
+results_json.add_style(config.style.clone());
+redis_cache.cache_results(serde_json::to_string(&results_json)?, &url)?;
+Ok(results_json)
+}
+}
+}
/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
#[get("/robots.txt")]
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-let page_content: String =
-read_to_string(format!("{}/robots.txt", handle_different_public_path()?))?;
+let page_content: String = read_to_string(format!("{}/robots.txt", get_public_path()?))?;
Ok(HttpResponse::Ok()
.content_type("text/plain; charset=ascii")
.body(page_content))


@@ -1,7 +1,7 @@
use std::net::TcpListener;
use handlebars::Handlebars;
-use websurfx::{config_parser::parser::Config, run};
+use websurfx::{config::parser::Config, run};
// Starts a new instance of the HTTP server, bound to a random available port
fn spawn_app() -> String {
@@ -41,5 +41,5 @@ async fn test_index() {
assert_eq!(res.text().await.unwrap(), template);
}
-// TODO: Write tests for tesing parameters for search function that if provided with something
+// TODO: Write tests for testing parameters for search function that if provided with something
// other than u32 like alphabets and special characters than it should panic
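
The TODO above suggests a test for non-u32 page values; a hedged sketch of what it could look like, reusing this file's spawn_app helper (the async-test attribute and the exact error status are assumptions; the point is that bad input yields an error response rather than a panic):

    #[tokio::test]
    async fn test_search_with_non_numeric_page() {
        let address = spawn_app();
        let client = reqwest::Client::new();
        // page is declared as a u32, so alphabetic input should fail query
        // deserialization and surface as an error response from the server.
        let res = client
            .get(format!("{}/search?q=test&page=not-a-number", address))
            .send()
            .await
            .unwrap();
        assert!(res.status().is_client_error() || res.status().is_server_error());
    }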


@@ -4,7 +4,7 @@ debug = false -- an option to enable or disable debug mode.
-- ### Server ###
port = "8080" -- port on which server should be launched
-binding_ip_addr = "127.0.0.1" --ip address on the which server should be launched.
+binding_ip = "127.0.0.1" --ip address on the which server should be launched.
production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users)
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
@@ -25,4 +25,4 @@ colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used fo
theme = "simple" -- the theme name which should be used for the website
-- ### Caching ###
-redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.