diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs
index 810ced4..900affc 100644
--- a/src/cache/cacher.rs
+++ b/src/cache/cacher.rs
@@ -6,6 +6,8 @@ use mini_moka::sync::Cache as MokaCache;
 use std::time::Duration;
 use tokio::sync::Mutex;
 
+use crate::results::aggregation_models::SearchResults;
+
 use super::{error::PoolError, redis_cacher::RedisCache};
 
 /// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
@@ -14,7 +16,7 @@ pub enum Cache {
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
     /// Contains the in-memory cache.
-    InMemory(MokaCache<String, String>),
+    InMemory(MokaCache<String, SearchResults>),
 }
 
 impl Cache {
@@ -37,9 +39,13 @@ impl Cache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
+    pub async fn cached_json(&mut self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         match self {
-            Cache::Redis(redis_cache) => redis_cache.cached_json(url).await,
+            Cache::Redis(redis_cache) => {
+                let json = redis_cache.cached_json(url).await?;
+                Ok(serde_json::from_str::<SearchResults>(&json)
+                    .map_err(|_| PoolError::SerializationError)?)
+            }
             Cache::InMemory(in_memory) => match in_memory.get(&url.to_string()) {
                 Some(res) => Ok(res),
                 None => Err(Report::new(PoolError::MissingValue)),
@@ -56,13 +62,17 @@ impl Cache {
     /// * `url` - It takes the url as a String.
     pub async fn cache_results(
         &mut self,
-        json_results: String,
+        search_results: SearchResults,
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         match self {
-            Cache::Redis(redis_cache) => redis_cache.cache_results(&json_results, url).await,
+            Cache::Redis(redis_cache) => {
+                let json = serde_json::to_string(&search_results)
+                    .map_err(|_| PoolError::SerializationError)?;
+                redis_cache.cache_results(&json, url).await
+            }
             Cache::InMemory(cache) => {
-                cache.insert(url.to_string(), json_results);
+                cache.insert(url.to_string(), search_results);
                 Ok(())
             }
         }
@@ -82,20 +92,20 @@ impl SharedCache {
         }
     }
 
-    /// A function which fetches the cached json results as json string.
-    pub async fn cached_json(&self, url: &str) -> Result<String, Report<PoolError>> {
+    /// A function which retrieves the cached SearchResults from the internal cache.
+    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cached_json(url).await
     }
 
     /// A function which caches the results by using the `url` as the key and
-    /// `json results` as the value and stores it in the cache
+    /// `SearchResults` as the value.
     pub async fn cache_results(
         &self,
-        json_results: String,
+        search_results: SearchResults,
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         let mut mut_cache = self.cache.lock().await;
-        mut_cache.cache_results(json_results, url).await
+        mut_cache.cache_results(search_results, url).await
     }
 }
diff --git a/src/cache/error.rs b/src/cache/error.rs
index 5f5d0e0..c1f9177 100644
--- a/src/cache/error.rs
+++ b/src/cache/error.rs
@@ -12,6 +12,7 @@ pub enum PoolError {
     /// This variant handles the errors which occurs when all the connections
     /// in the connection pool return a connection dropped redis error.
     PoolExhaustionWithConnectionDropError,
+    SerializationError,
     MissingValue,
 }
 
@@ -34,6 +35,9 @@ impl fmt::Display for PoolError {
             PoolError::MissingValue => {
                 write!(f, "The value is missing from the cache")
             }
+            PoolError::SerializationError => {
+                write!(f, "Unable to serialize, deserialize from the cache")
+            }
         }
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 68f6e01..d310e90 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -44,7 +44,8 @@ use handler::paths::{file_path, FileType};
 ///
 /// let config = Config::parse(true).unwrap();
 /// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
-/// let server = run(listener,config).expect("Failed to start server");
+/// let cache = Cache::new_in_memory();
+/// let server = run(listener,config,cache).expect("Failed to start server");
 /// ```
 pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Result<Server> {
     let mut handlebars: Handlebars<'_> = Handlebars::new();
diff --git a/src/models/aggregation_models.rs b/src/models/aggregation_models.rs
index ea4a914..656297f 100644
--- a/src/models/aggregation_models.rs
+++ b/src/models/aggregation_models.rs
@@ -102,7 +102,7 @@ impl EngineErrorInfo {
 /// A named struct to store, serialize, deserialize the all the search results scraped and
 /// aggregated from the upstream search engines.
 /// `SearchResult` structs.
-#[derive(Serialize, Deserialize, Default)]
+#[derive(Serialize, Deserialize, Default, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResults {
     /// Stores the individual serializable `SearchResult` struct into a vector of
diff --git a/src/server/routes/search.rs b/src/server/routes/search.rs
index ea78034..27949f5 100644
--- a/src/server/routes/search.rs
+++ b/src/server/routes/search.rs
@@ -192,12 +192,12 @@ async fn results(
     safe_search: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     // fetch the cached results json.
-    let cached_results_json = cache.cached_json(&url).await.ok();
+    let cached_results = cache.cached_json(&url).await;
     // check if fetched cache results was indeed fetched or it was an error and if so
     // handle the data accordingly.
-    match cached_results_json {
-        Some(results) => Ok(serde_json::from_str::<SearchResults>(&results)?),
-        None => {
+    match cached_results {
+        Ok(results) => Ok(results),
+        Err(_) => {
             if safe_search == 4 {
                 let mut results: SearchResults = SearchResults::default();
                 let mut _flag: bool =
@@ -208,9 +208,7 @@ async fn results(
                 results.set_disallowed();
                 results.add_style(&config.style);
                 results.set_page_query(query);
-                cache
-                    .cache_results(serde_json::to_string(&results)?, &url)
-                    .await?;
+                cache.cache_results(results.clone(), &url).await?;
                 return Ok(results);
             }
         }
@@ -258,8 +256,7 @@ async fn results(
             results.set_filtered();
         }
         results.add_style(&config.style);
-        let json_results = serde_json::to_string(&results)?;
-        cache.cache_results(json_results, &url).await?;
+        cache.cache_results(results.clone(), &url).await?;
        Ok(results)
    }
}
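For reviewers who want to see the intent of the refactor in isolation, below is a minimal, self-contained sketch of the same pattern: callers exchange typed values with the cache, the in-memory backend stores them directly, and JSON (de)serialization happens only at the boundary of the external backend. The names `TinyResults`, `FakeRedis`, `TinyCache`, `CacheError`, and the example URL are hypothetical stand-ins for `SearchResults`, `RedisCache`, `Cache`, and `PoolError`; plain `HashMap`s replace `mini_moka` and Redis, and the sketch assumes only `serde` (with the `derive` feature) and `serde_json`, which the project already depends on.

```rust
use std::collections::HashMap;

use serde::{Deserialize, Serialize};

/// Hypothetical stand-in for `SearchResults`: any `Clone + Serialize + Deserialize` value works.
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
struct TinyResults {
    page_query: String,
    no_engines_selected: bool,
}

/// Hypothetical stand-in for `RedisCache`: an external store that only understands strings.
#[derive(Default)]
struct FakeRedis {
    store: HashMap<String, String>,
}

/// Mirrors the shape of the patched `Cache` enum: typed values in memory,
/// JSON strings only inside the "Redis" branch.
enum TinyCache {
    Redis(FakeRedis),
    InMemory(HashMap<String, TinyResults>),
}

/// Simplified counterpart of `PoolError` for this sketch.
#[derive(Debug)]
enum CacheError {
    MissingValue,
    Serialization,
}

impl TinyCache {
    /// Callers receive a typed value; deserialization happens only for the external backend.
    fn cached_results(&self, url: &str) -> Result<TinyResults, CacheError> {
        match self {
            TinyCache::Redis(redis) => {
                let json = redis.store.get(url).ok_or(CacheError::MissingValue)?;
                serde_json::from_str(json).map_err(|_| CacheError::Serialization)
            }
            TinyCache::InMemory(map) => map.get(url).cloned().ok_or(CacheError::MissingValue),
        }
    }

    /// Callers hand over a typed value; serialization happens only for the external backend.
    fn cache_results(&mut self, results: TinyResults, url: &str) -> Result<(), CacheError> {
        match self {
            TinyCache::Redis(redis) => {
                let json =
                    serde_json::to_string(&results).map_err(|_| CacheError::Serialization)?;
                redis.store.insert(url.to_owned(), json);
                Ok(())
            }
            TinyCache::InMemory(map) => {
                map.insert(url.to_owned(), results);
                Ok(())
            }
        }
    }
}

fn main() -> Result<(), CacheError> {
    let url = "https://example.invalid/search?q=rust+caching";
    let results = TinyResults {
        page_query: "rust caching".to_owned(),
        no_engines_selected: false,
    };

    // The route-handler side now works purely with typed values, as in the
    // patched `results()` in src/server/routes/search.rs.
    let mut in_memory = TinyCache::InMemory(HashMap::new());
    in_memory.cache_results(results.clone(), url)?;
    assert_eq!(in_memory.cached_results(url)?, results);

    // The same call sites work unchanged against the "external" backend,
    // which is the only place JSON strings still appear.
    let mut redis_like = TinyCache::Redis(FakeRedis::default());
    redis_like.cache_results(results.clone(), url)?;
    assert_eq!(redis_like.cached_results(url)?, results);

    println!("both backends round-tripped the typed value");
    Ok(())
}
```

One design consequence worth noting, visible in the diff and mirrored above: because the in-memory branch now holds the value itself rather than a string, `SearchResults` needs `Clone` (added in the derive) so cache hits can hand out copies and call sites can do `cache_results(results.clone(), &url)` before returning the results.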