mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-21 21:48:21 -05:00

Merge pull request #22 from XFFXFF/refactor

refactor RedisCache to create fewer connections
Authored by neon_arch on 2023-05-15 07:57:52 +00:00; committed by GitHub.
commit d54270f892
3 changed files with 23 additions and 27 deletions
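
In short: the cache previously stored only the Redis URL and opened a brand-new connection inside every cache method; after this change it opens one connection in new() and reuses it through &mut self. Below is a minimal, illustrative sketch of the two patterns (the struct names UrlOnlyCache and ConnectionCache are made up for this comparison and are not the project's types).

// Illustrative sketch only, not the project's code.
use redis::{Client, Commands, Connection};

// Before: only the URL is stored, so every cache call pays for Client::open()
// plus get_connection(), i.e. a fresh connection per lookup or store.
struct UrlOnlyCache {
    redis_connection_url: String,
}

impl UrlOnlyCache {
    fn get(&self, key: &str) -> redis::RedisResult<String> {
        let mut connection: Connection =
            Client::open(self.redis_connection_url.as_str())?.get_connection()?;
        connection.get(key)
    }
}

// After: one connection is opened in new() and reused through &mut self.
struct ConnectionCache {
    connection: Connection,
}

impl ConnectionCache {
    fn new(url: &str) -> redis::RedisResult<Self> {
        Ok(Self {
            connection: Client::open(url)?.get_connection()?,
        })
    }

    fn get(&mut self, key: &str) -> redis::RedisResult<String> {
        self.connection.get(key)
    }
}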

.gitignore (2 changed lines)

@@ -1 +1,3 @@
 /target
+
+dump.rdb

src/cache/cacher.rs (36 changed lines)

@@ -10,9 +10,8 @@ use redis::{Client, Commands, Connection};
 /// # Fields
 ///
 /// * `redis_connection_url` - It stores the redis Connection url address.
-#[derive(Clone)]
 pub struct RedisCache {
-    redis_connection_url: String,
+    connection: Connection,
 }

 impl RedisCache {
@@ -21,10 +20,11 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `redis_connection_url` - It stores the redis Connection url address.
-    pub fn new(redis_connection_url: String) -> Self {
-        RedisCache {
-            redis_connection_url,
-        }
+    pub fn new(redis_connection_url: String) -> Result<Self, Box<dyn std::error::Error>> {
+        let client = Client::open(redis_connection_url)?;
+        let connection = client.get_connection()?;
+        let redis_cache = RedisCache { connection };
+        Ok(redis_cache)
     }

     /// A helper function which computes the hash of the url and formats and returns it as string.
@@ -32,7 +32,7 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as string.
-    fn compute_url_hash(self, url: &str) -> String {
+    fn compute_url_hash(url: &str) -> String {
         format!("{:?}", compute(url))
     }
@@ -41,11 +41,9 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub fn cached_results_json(self, url: String) -> Result<String, Box<dyn std::error::Error>> {
-        let hashed_url_string = self.clone().compute_url_hash(&url);
-        let mut redis_connection: Connection =
-            Client::open(self.redis_connection_url)?.get_connection()?;
-        Ok(redis_connection.get(hashed_url_string)?)
+    pub fn cached_results_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+        let hashed_url_string = Self::compute_url_hash(url);
+        Ok(self.connection.get(hashed_url_string)?)
     }

     /// A function which caches the results by using the hashed `url` as the key and
@@ -57,20 +55,18 @@ impl RedisCache {
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
     pub fn cache_results(
-        self,
+        &mut self,
         json_results: String,
-        url: String,
+        url: &str,
     ) -> Result<(), Box<dyn std::error::Error>> {
-        let hashed_url_string = self.clone().compute_url_hash(&url);
-        let mut redis_connection: Connection =
-            Client::open(self.redis_connection_url)?.get_connection()?;
+        let hashed_url_string = Self::compute_url_hash(url);

         // put results_json into cache
-        redis_connection.set(hashed_url_string.clone(), json_results)?;
+        self.connection.set(&hashed_url_string, json_results)?;

         // Set the TTL for the key to 60 seconds
-        redis_connection
-            .expire::<String, u32>(hashed_url_string.clone(), 60)
+        self.connection
+            .expire::<String, u32>(hashed_url_string, 60)
             .unwrap();

         Ok(())
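
With those changes, a caller constructs the cache once and reuses it through a mutable reference. The following is a hedged usage sketch, assuming the RedisCache defined above is in scope; the Redis URL, page URL, and JSON payload are placeholders.

// Usage sketch only; not taken from the project.
fn example() -> Result<(), Box<dyn std::error::Error>> {
    // One connection for the whole sequence of cache operations.
    let mut redis_cache = RedisCache::new("redis://127.0.0.1:6379".to_string())?;

    let page_url = "https://example.org/search?q=rust&page=0";
    match redis_cache.cached_results_json(page_url) {
        // Cache hit: reuse the stored JSON.
        Ok(json) => println!("hit: {} bytes", json.len()),
        // Cache miss (or Redis error): build the results, then store them.
        Err(_) => {
            let json_results = String::from("{\"results\":[]}");
            redis_cache.cache_results(json_results, page_url)?;
        }
    }
    Ok(())
}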

(third changed file, containing the search route handler)

@@ -73,7 +73,7 @@ pub async fn search(
     let params = web::Query::<SearchParams>::from_query(req.query_string())?;

     //Initialize redis cache connection struct
-    let redis_cache = RedisCache::new(config.redis_connection_url.clone());
+    let mut redis_cache = RedisCache::new(config.redis_connection_url.clone())?;
     match &params.q {
         Some(query) => {
             if query.trim().is_empty() {
@@ -117,7 +117,7 @@ pub async fn search(
             };

             // fetch the cached results json.
-            let cached_results_json = redis_cache.clone().cached_results_json(page_url.clone());
+            let cached_results_json = redis_cache.cached_results_json(&page_url);
             // check if fetched results was indeed fetched or it was an error and if so
             // handle the data accordingly.
             match cached_results_json {
@@ -128,12 +128,10 @@ pub async fn search(
                 }
                 Err(_) => {
                     let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
                         aggregate(query, page).await?;
                     results_json.add_style(config.style.clone());
-                    redis_cache.clone().cache_results(
-                        serde_json::to_string(&results_json)?,
-                        page_url.clone(),
-                    )?;
+                    redis_cache
+                        .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
                     let page_content: String = hbs.render("search", &results_json)?;
                     Ok(HttpResponse::Ok().body(page_content))
                 }
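
Worth noting about the resulting design: the search handler still calls RedisCache::new() once per request, so each request opens one Redis connection; the saving is that a single request no longer opens a separate connection for every cached_results_json and cache_results call. Since #[derive(Clone)] is gone and the methods take &mut self, sharing one long-lived connection across requests would require extra synchronization and is outside the scope of this change.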