From d33129c4c964dc2b64a37c616fff70bc23155806 Mon Sep 17 00:00:00 2001
From: neon_arch
Date: Sun, 17 Sep 2023 19:56:48 +0300
Subject: [PATCH] =?UTF-8?q?=F0=9F=A7=B9=20chore:=20make=20clippy=20happy?=
 =?UTF-8?q?=20(#244)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/cache/cacher.rs       | 56 ++++++++++++++++++++-------------------
 src/cache/error.rs        | 14 +++++-----
 src/cache/redis_cacher.rs | 14 +++++-----
 tests/index.rs            |  1 +
 4 files changed, 44 insertions(+), 41 deletions(-)

diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs
index 1ab10df..12f88ff 100644
--- a/src/cache/cacher.rs
+++ b/src/cache/cacher.rs
@@ -10,7 +10,7 @@ use tokio::sync::Mutex;
 
 use crate::{config::parser::Config, models::aggregation_models::SearchResults};
 
-use super::error::PoolError;
+use super::error::CacheError;
 #[cfg(feature = "redis-cache")]
 use super::redis_cacher::RedisCache;
 
@@ -42,25 +42,27 @@ impl Cache {
     /// It returns a newly initialized variant based on the feature enabled by the user.
     pub async fn build(_config: &Config) -> Self {
         #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
-        log::info!("Using a hybrid cache");
-        #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
-        return Cache::new_hybrid(
-            RedisCache::new(&_config.redis_url, 5)
-                .await
-                .expect("Redis cache configured"),
-        );
+        {
+            log::info!("Using a hybrid cache");
+            Cache::new_hybrid(
+                RedisCache::new(&_config.redis_url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            )
+        }
         #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
-        log::info!("Listening redis server on {}", &_config.redis_url);
-        #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
-        return Cache::new(
-            RedisCache::new(&_config.redis_url, 5)
-                .await
-                .expect("Redis cache configured"),
-        );
+        {
+            log::info!("Listening redis server on {}", &_config.redis_url);
+            Cache::new(
+                RedisCache::new(&_config.redis_url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            )
+        }
         #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
         {
             log::info!("Using an in-memory cache");
-            return Cache::new_in_memory();
+            Cache::new_in_memory()
         }
         #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
         {
@@ -131,27 +133,27 @@ impl Cache {
     ///
     /// Returns the `SearchResults` from the cache if the program executes normally otherwise
     /// returns a `CacheError` if the results cannot be retrieved from the cache.
-    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<PoolError>> {
+    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
         match self {
-            Cache::Disabled => Err(Report::new(PoolError::MissingValue)),
+            Cache::Disabled => Err(Report::new(CacheError::MissingValue)),
             #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
                 let json = redis_cache.cached_json(_url).await?;
                 Ok(serde_json::from_str::<SearchResults>(&json)
-                    .map_err(|_| PoolError::SerializationError)?)
+                    .map_err(|_| CacheError::SerializationError)?)
             }
             #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
             Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) {
                 Some(res) => Ok(res),
-                None => Err(Report::new(PoolError::MissingValue)),
+                None => Err(Report::new(CacheError::MissingValue)),
             },
             #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
             Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await {
                 Ok(res) => Ok(serde_json::from_str::<SearchResults>(&res)
-                    .map_err(|_| PoolError::SerializationError)?),
+                    .map_err(|_| CacheError::SerializationError)?),
                 Err(_) => match in_memory.get(&_url.to_string()) {
                     Some(res) => Ok(res),
-                    None => Err(Report::new(PoolError::MissingValue)),
+                    None => Err(Report::new(CacheError::MissingValue)),
                 },
             },
         }
@@ -174,13 +176,13 @@ impl Cache {
         &mut self,
         _search_results: &SearchResults,
         _url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         match self {
             Cache::Disabled => Ok(()),
             #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
                 let json = serde_json::to_string(_search_results)
-                    .map_err(|_| PoolError::SerializationError)?;
+                    .map_err(|_| CacheError::SerializationError)?;
                 redis_cache.cache_results(&json, _url).await
             }
             #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
@@ -191,7 +193,7 @@ impl Cache {
             #[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
             Cache::Hybrid(redis_cache, cache) => {
                 let json = serde_json::to_string(_search_results)
-                    .map_err(|_| PoolError::SerializationError)?;
+                    .map_err(|_| CacheError::SerializationError)?;
                 match redis_cache.cache_results(&json, _url).await {
                     Ok(_) => Ok(()),
                     Err(_) => {
@@ -235,7 +237,7 @@ impl SharedCache {
     ///
     /// Returns a `SearchResults` struct containing the search results from the cache if nothing
     /// goes wrong otherwise returns a `CacheError`.
-    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<PoolError>> {
+    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cached_json(url).await
     }
@@ -258,7 +260,7 @@ impl SharedCache {
         &self,
         search_results: &SearchResults,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cache_results(search_results, url).await
     }
diff --git a/src/cache/error.rs b/src/cache/error.rs
index 9efda32..62c9098 100644
--- a/src/cache/error.rs
+++ b/src/cache/error.rs
@@ -7,7 +7,7 @@ use redis::RedisError;
 
 /// A custom error type used for handling redis async pool associated errors.
 #[derive(Debug)]
-pub enum PoolError {
+pub enum CacheError {
     /// This variant handles all errors related to `RedisError`,
     #[cfg(feature = "redis-cache")]
     RedisError(RedisError),
@@ -20,31 +20,31 @@ pub enum PoolError {
     MissingValue,
 }
 
-impl fmt::Display for PoolError {
+impl fmt::Display for CacheError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             #[cfg(feature = "redis-cache")]
-            PoolError::RedisError(redis_error) => {
+            CacheError::RedisError(redis_error) => {
                 if let Some(detail) = redis_error.detail() {
                     write!(f, "{}", detail)
                 } else {
                     write!(f, "")
                 }
             }
-            PoolError::PoolExhaustionWithConnectionDropError => {
+            CacheError::PoolExhaustionWithConnectionDropError => {
                 write!(
                     f,
                     "Error all connections from the pool dropped with connection error"
                 )
             }
-            PoolError::MissingValue => {
+            CacheError::MissingValue => {
                 write!(f, "The value is missing from the cache")
             }
-            PoolError::SerializationError => {
+            CacheError::SerializationError => {
                 write!(f, "Unable to serialize, deserialize from the cache")
             }
         }
     }
 }
 
-impl error_stack::Context for PoolError {}
+impl error_stack::Context for CacheError {}
diff --git a/src/cache/redis_cacher.rs b/src/cache/redis_cacher.rs
index 8c74bac..e90344f 100644
--- a/src/cache/redis_cacher.rs
+++ b/src/cache/redis_cacher.rs
@@ -6,7 +6,7 @@ use futures::future::try_join_all;
 use md5::compute;
 use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};
 
-use super::error::PoolError;
+use super::error::CacheError;
 
 /// A named struct which stores the redis Connection url address to which the client will
 /// connect to.
@@ -72,7 +72,7 @@ impl RedisCache {
     ///
     /// Returns the results as a String from the cache on success otherwise returns a `CacheError`
     /// on a failure.
-    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
+    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);
 
@@ -95,7 +95,7 @@ impl RedisCache {
                         self.current_connection += 1;
                         if self.current_connection == self.pool_size {
                             return Err(Report::new(
-                                PoolError::PoolExhaustionWithConnectionDropError,
+                                CacheError::PoolExhaustionWithConnectionDropError,
                             ));
                         }
                         result = self.connection_pool[self.current_connection as usize]
@@ -103,7 +103,7 @@ impl RedisCache {
                             .await;
                         continue;
                     }
-                    false => return Err(Report::new(PoolError::RedisError(error))),
+                    false => return Err(Report::new(CacheError::RedisError(error))),
                 },
                 Ok(res) => return Ok(res),
             }
@@ -127,7 +127,7 @@ impl RedisCache {
         &mut self,
         json_results: &str,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);
 
@@ -150,7 +150,7 @@ impl RedisCache {
                         self.current_connection += 1;
                         if self.current_connection == self.pool_size {
                             return Err(Report::new(
-                                PoolError::PoolExhaustionWithConnectionDropError,
+                                CacheError::PoolExhaustionWithConnectionDropError,
                             ));
                         }
                         result = self.connection_pool[self.current_connection as usize]
@@ -158,7 +158,7 @@ impl RedisCache {
                            .await;
                        continue;
                    }
-                    false => return Err(Report::new(PoolError::RedisError(error))),
+                    false => return Err(Report::new(CacheError::RedisError(error))),
                 },
                 Ok(_) => return Ok(()),
             }
diff --git a/tests/index.rs b/tests/index.rs
index ab56e1d..91d0814 100644
--- a/tests/index.rs
+++ b/tests/index.rs
@@ -12,6 +12,7 @@ fn spawn_app() -> String {
     let server = run(
         listener,
         config,
+        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
websurfx::cache::cacher::Cache::new_in_memory(), ) .expect("Failed to bind address");