diff --git a/src/bin/websurfx.rs b/src/bin/websurfx.rs index 1852695..c3d8c38 100644 --- a/src/bin/websurfx.rs +++ b/src/bin/websurfx.rs @@ -5,7 +5,7 @@ #[cfg(not(feature = "dhat-heap"))] use mimalloc::MiMalloc; -use std::net::TcpListener; +use std::{net::TcpListener, sync::OnceLock}; use websurfx::{cache::cacher::create_cache, config::parser::Config, run}; /// A dhat heap memory profiler @@ -17,6 +17,9 @@ static ALLOC: dhat::Alloc = dhat::Alloc; #[global_allocator] static GLOBAL: MiMalloc = MiMalloc; +/// A static constant for holding the parsed config. +static CONFIG: OnceLock<Config> = OnceLock::new(); + /// The function that launches the main server and registers all the routes of the website. /// /// # Error @@ -29,10 +32,10 @@ async fn main() -> std::io::Result<()> { #[cfg(feature = "dhat-heap")] let _profiler = dhat::Profiler::new_heap(); - // Initialize the parsed config file. - let config = Config::parse(false).unwrap(); + // Initialize the parsed config globally. + let config = CONFIG.get_or_init(|| Config::parse(false).unwrap()); - let cache = create_cache(&config).await; + let cache = create_cache(config).await; log::info!( "started server on port {} and IP {}", diff --git a/src/lib.rs b/src/lib.rs index ec35273..19702db 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,7 +14,7 @@ pub mod results; pub mod server; pub mod templates; -use std::net::TcpListener; +use std::{net::TcpListener, sync::OnceLock}; use crate::server::router; @@ -31,6 +31,9 @@ use cache::cacher::{Cacher, SharedCache}; use config::parser::Config; use handler::{file_path, FileType}; +/// A static constant for holding the cache struct. +static SHARED_CACHE: OnceLock<SharedCache> = OnceLock::new(); + /// Runs the web server on the provided TCP listener and returns a `Server` instance. 
/// /// # Arguments @@ -57,14 +60,14 @@ use handler::{file_path, FileType}; /// ``` pub fn run( listener: TcpListener, - config: Config, + config: &'static Config, cache: impl Cacher + 'static, ) -> std::io::Result<Server> { let public_folder_path: &str = file_path(FileType::Theme)?; let cloned_config_threads_opt: u8 = config.threads; - let cache = web::Data::new(SharedCache::new(cache)); + let cache = SHARED_CACHE.get_or_init(|| SharedCache::new(cache)); let server = HttpServer::new(move || { let cors: Cors = Cors::default() @@ -81,8 +84,8 @@ pub fn run( // Compress the responses provided by the server for the client requests. .wrap(Compress::default()) .wrap(Logger::default()) // added logging middleware for logging. - .app_data(web::Data::new(config.clone())) - .app_data(cache.clone()) + .app_data(web::Data::new(config)) + .app_data(web::Data::new(cache)) .wrap(cors) .wrap(Governor::new( &GovernorConfigBuilder::default() diff --git a/src/server/router.rs b/src/server/router.rs index c46e79d..aa2a9cc 100644 --- a/src/server/router.rs +++ b/src/server/router.rs @@ -11,7 +11,9 @@ use std::fs::read_to_string; /// Handles the route of index page or main page of the `websurfx` meta search engine website. 
#[get("/")] -pub async fn index(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> { +pub async fn index( + config: web::Data<&'static Config>, +) -> Result<HttpResponse, Box<dyn std::error::Error>> { Ok(HttpResponse::Ok().content_type(ContentType::html()).body( crate::templates::views::index::index( &config.style.colorscheme, @@ -25,7 +27,7 @@ pub async fn index(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn s pub async fn not_found( - config: web::Data<Config>, + config: web::Data<&'static Config>, ) -> Result<HttpResponse, Box<dyn std::error::Error>> { Ok(HttpResponse::Ok().content_type(ContentType::html()).body( crate::templates::views::not_found::not_found( @@ -49,7 +51,9 @@ pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std -pub async fn about(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> { +pub async fn about( + config: web::Data<&'static Config>, +) -> Result<HttpResponse, Box<dyn std::error::Error>> { Ok(HttpResponse::Ok().content_type(ContentType::html()).body( crate::templates::views::about::about( &config.style.colorscheme, @@ -63,7 +67,7 @@ pub async fn about(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn s pub async fn settings( - config: web::Data<Config>, + config: web::Data<&'static Config>, ) -> Result<HttpResponse, Box<dyn std::error::Error>> { Ok(HttpResponse::Ok().content_type(ContentType::html()).body( crate::templates::views::settings::settings( diff --git a/src/server/routes/search.rs b/src/server/routes/search.rs index 16cfa28..1125b8d 100644 --- a/src/server/routes/search.rs +++ b/src/server/routes/search.rs @@ -37,8 +37,8 @@ use tokio::join; #[get("/search")] pub async fn search( req: HttpRequest, - config: web::Data<Config>, - cache: web::Data<SharedCache>, + config: web::Data<&'static Config>, + cache: web::Data<&'static SharedCache>, ) -> Result<HttpResponse, Box<dyn std::error::Error>> { use std::sync::Arc; let params = web::Query::<SearchParams>::from_query(req.query_string())?; @@ -158,8 +158,8 @@ pub async fn search( /// It returns the `SearchResults` struct if the search results could be successfully fetched from /// the cache or from the upstream search engines otherwise it returns an appropriate error. 
async fn results( - config: &Config, - cache: &web::Data<SharedCache>, + config: &'static Config, + cache: &'static SharedCache, query: &str, page: u32, search_settings: &server_models::Cookie<'_>, diff --git a/tests/index.rs b/tests/index.rs index 563c2d9..010795d 100644 --- a/tests/index.rs +++ b/tests/index.rs @@ -1,14 +1,17 @@ -use std::net::TcpListener; +use std::{net::TcpListener, sync::OnceLock}; use websurfx::{config::parser::Config, run, templates::views}; +/// A static constant for holding the parsed config. +static CONFIG: OnceLock<Config> = OnceLock::new(); + // Starts a new instance of the HTTP server, bound to a random available port async fn spawn_app() -> String { // Binding to port 0 will trigger the OS to assign a port for us. let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port"); let port = listener.local_addr().unwrap().port(); - let config = Config::parse(false).unwrap(); - let cache = websurfx::cache::cacher::create_cache(&config).await; + let config = CONFIG.get_or_init(|| Config::parse(false).unwrap()); + let cache = websurfx::cache::cacher::create_cache(config).await; let server = run(listener, config, cache).expect("Failed to bind address"); tokio::spawn(server);