mirror of
https://github.com/neon-mmd/websurfx.git
synced 2024-11-22 22:18:23 -05:00
991f3f59de
* ♻️ refactor: initialize & store the config & cache structs as a constant (#486) - initializes & stores the config & cache structs as a static constant. - Pass the config & cache structs as a static reference to all the functions handling their respective route. * ⚡ perf: replace hashmaps with vectors for fetching & aggregating results (#486) - replace hashmaps with vectors for fetching, collecting & aggregating results as they tend to be contiguous & cache-efficient data structures. - refactor & redesign algorithms for fetching & aggregating results centered around vectors in aggregate function. * ➕ build: add the future crate (#486) * ⚡ perf: use `futureunordered` for collecting results fetched from the tokio spawn tasks (#486) - using the `futureunordered` instead of vector for collecting results reduces the time it takes to fetch the results as the results do not need to come in specific order so any result that gets fetched first gets collected in the `futureunordered` type. Co-authored-by: Spencerjibz <spencernajib2@gmail.com> * ⚡ perf: initialize new async connections in parallel using tokio spawn tasks (#486) * ⚡ perf: initialize redis pipeline struct once with the default size of 3 (#486) * ⚡ perf: reduce branch predictions by reducing conditional code branches (#486) * ✅ test(unit): provide unit test for the `get_safesearch_level` function (#486) * ⚡ perf: reduce clones & use index based loop to improve search results filtering performance (#486) * 🚨 fix(clippy): make clippy/format checks happy (#486) * 🚨 fix(build): make the cargo build check happy (#486) * ⚡ perf: reduce the amount of clones, to_owneds & to_strings (#486) * ⚡ perf: use async crates & methods & make functions async (#486) * 🔖 chore(release): bump the app version (#486) --------- Co-authored-by: Spencerjibz <spencernajib2@gmail.com>
121 lines
4.1 KiB
Rust
121 lines
4.1 KiB
Rust
//! This main library module provides the functionality to provide and handle the Tcp server
|
|
//! and register all the routes for the `websurfx` meta search engine website.
|
|
|
|
#![forbid(unsafe_code, clippy::panic)]
|
|
#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
|
|
#![warn(clippy::cognitive_complexity, rust_2018_idioms)]
|
|
|
|
pub mod cache;
|
|
pub mod config;
|
|
pub mod engines;
|
|
pub mod handler;
|
|
pub mod models;
|
|
pub mod results;
|
|
pub mod server;
|
|
pub mod templates;
|
|
|
|
use std::{net::TcpListener, sync::OnceLock};
|
|
|
|
use crate::server::router;
|
|
|
|
use actix_cors::Cors;
|
|
use actix_files as fs;
|
|
use actix_governor::{Governor, GovernorConfigBuilder};
|
|
use actix_web::{
|
|
dev::Server,
|
|
http::header,
|
|
middleware::{Compress, Logger},
|
|
web, App, HttpServer,
|
|
};
|
|
use cache::cacher::{Cacher, SharedCache};
|
|
use config::parser::Config;
|
|
use handler::{file_path, FileType};
|
|
|
|
/// A lazily-initialized global holding the shared cache wrapper.
///
/// Initialized exactly once inside [`run`] via `get_or_init`, so every worker
/// thread of the HTTP server observes the same `SharedCache` instance.
static SHARED_CACHE: OnceLock<SharedCache> = OnceLock::new();
|
|
|
|
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
|
|
///
|
|
/// # Arguments
|
|
///
|
|
/// * `listener` - A `TcpListener` instance representing the address and port to listen on.
|
|
///
|
|
/// # Returns
|
|
///
|
|
/// Returns a `Result` containing a `Server` instance on success, or an `std::io::Error` on failure.
|
|
///
|
|
/// # Example
|
|
///
|
|
/// ```rust
|
|
/// use std::{net::TcpListener, sync::OnceLock};
|
|
/// use websurfx::{config::parser::Config, run, cache::cacher::create_cache};
|
|
///
|
|
/// /// A static constant for holding the parsed config.
|
|
/// static CONFIG: OnceLock<Config> = OnceLock::new();
|
|
///
|
|
/// #[tokio::main]
|
|
/// async fn main(){
|
|
/// // Initialize the parsed config globally.
|
|
/// let config = CONFIG.get_or_init(|| Config::parse(true).unwrap());
|
|
/// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
|
|
/// let cache = create_cache(config).await;
|
|
/// let server = run(listener,&config,cache).expect("Failed to start server");
|
|
/// }
|
|
/// ```
|
|
pub fn run(
|
|
listener: TcpListener,
|
|
config: &'static Config,
|
|
cache: impl Cacher + 'static,
|
|
) -> std::io::Result<Server> {
|
|
let public_folder_path: &str = file_path(FileType::Theme)?;
|
|
|
|
let cache = SHARED_CACHE.get_or_init(|| SharedCache::new(cache));
|
|
|
|
let server = HttpServer::new(move || {
|
|
let cors: Cors = Cors::default()
|
|
.allow_any_origin()
|
|
.allowed_methods(vec!["GET"])
|
|
.allowed_headers(vec![
|
|
header::ORIGIN,
|
|
header::CONTENT_TYPE,
|
|
header::REFERER,
|
|
header::COOKIE,
|
|
]);
|
|
|
|
App::new()
|
|
// Compress the responses provided by the server for the client requests.
|
|
.wrap(Compress::default())
|
|
.wrap(Logger::default()) // added logging middleware for logging.
|
|
.app_data(web::Data::new(config))
|
|
.app_data(web::Data::new(cache))
|
|
.wrap(cors)
|
|
.wrap(Governor::new(
|
|
&GovernorConfigBuilder::default()
|
|
.per_second(config.rate_limiter.time_limit as u64)
|
|
.burst_size(config.rate_limiter.number_of_requests as u32)
|
|
.finish()
|
|
.unwrap(),
|
|
))
|
|
// Serve images and static files (css and js files).
|
|
.service(
|
|
fs::Files::new("/static", format!("{}/static", public_folder_path))
|
|
.show_files_listing(),
|
|
)
|
|
.service(
|
|
fs::Files::new("/images", format!("{}/images", public_folder_path))
|
|
.show_files_listing(),
|
|
)
|
|
.service(router::robots_data) // robots.txt
|
|
.service(router::index) // index page
|
|
.service(server::routes::search::search) // search page
|
|
.service(router::about) // about page
|
|
.service(router::settings) // settings page
|
|
.default_service(web::route().to(router::not_found)) // error page
|
|
})
|
|
.workers(config.threads as usize)
|
|
// Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
|
|
.listen(listener)?
|
|
.run();
|
|
Ok(server)
|
|
}
|