Mirror of https://github.com/neon-mmd/websurfx.git
Instead of caching jsons, we can cache the original structure
commit 519ebe0fd8
parent 996ff84c5b
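
The practical effect, taken from the hunks below: route handlers stop serializing `SearchResults` to a JSON string before handing it to the cache, and get the struct back directly on a hit. A rough caller-side before/after sketch, lifted from the `results()` hunks further down with the surrounding handler code omitted:

// Before this commit: serialize to JSON, cache the string, parse it again on read.
let json_results = serde_json::to_string(&results)?;
cache.cache_results(json_results, &url).await?;

// After this commit: cache the structure itself; only the Redis backend still
// converts to JSON, and that conversion now happens inside the cacher.
cache.cache_results(results.clone(), &url).await?;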
src/cache/cacher.rs (vendored) · 32 changes
@@ -6,6 +6,8 @@ use mini_moka::sync::Cache as MokaCache;
 use std::time::Duration;
 use tokio::sync::Mutex;
 
+use crate::results::aggregation_models::SearchResults;
+
 use super::{error::PoolError, redis_cacher::RedisCache};
 
 /// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
@@ -14,7 +16,7 @@ pub enum Cache {
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
     /// Contains the in-memory cache.
-    InMemory(MokaCache<String, String>),
+    InMemory(MokaCache<String, SearchResults>),
 }
 
 impl Cache {
@@ -37,9 +39,13 @@ impl Cache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
+    pub async fn cached_json(&mut self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         match self {
-            Cache::Redis(redis_cache) => redis_cache.cached_json(url).await,
+            Cache::Redis(redis_cache) => {
+                let json = redis_cache.cached_json(url).await?;
+                Ok(serde_json::from_str::<SearchResults>(&json)
+                    .map_err(|_| PoolError::SerializationError)?)
+            }
             Cache::InMemory(in_memory) => match in_memory.get(&url.to_string()) {
                 Some(res) => Ok(res),
                 None => Err(Report::new(PoolError::MissingValue)),
@@ -56,13 +62,17 @@ impl Cache {
     /// * `url` - It takes the url as a String.
     pub async fn cache_results(
         &mut self,
-        json_results: String,
+        search_results: SearchResults,
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         match self {
-            Cache::Redis(redis_cache) => redis_cache.cache_results(&json_results, url).await,
+            Cache::Redis(redis_cache) => {
+                let json = serde_json::to_string(&search_results)
+                    .map_err(|_| PoolError::SerializationError)?;
+                redis_cache.cache_results(&json, url).await
+            }
             Cache::InMemory(cache) => {
-                cache.insert(url.to_string(), json_results);
+                cache.insert(url.to_string(), search_results);
                 Ok(())
             }
         }
@@ -82,20 +92,20 @@ impl SharedCache {
         }
     }
 
-    /// A function which fetches the cached json results as json string.
-    pub async fn cached_json(&self, url: &str) -> Result<String, Report<PoolError>> {
+    /// A function which retrieves the cached SearchResults from the internal cache.
+    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cached_json(url).await
     }
 
     /// A function which caches the results by using the `url` as the key and
-    /// `json results` as the value and stores it in the cache
+    /// `SearchResults` as the value.
     pub async fn cache_results(
         &self,
-        json_results: String,
+        search_results: SearchResults,
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         let mut mut_cache = self.cache.lock().await;
-        mut_cache.cache_results(json_results, url).await
+        mut_cache.cache_results(search_results, url).await
     }
 }
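
Taken together, the new `Cache` surface is `cache_results(SearchResults, &str)` on write and `cached_json(&str) -> Result<SearchResults, Report<PoolError>>` on read. A minimal usage sketch against the in-memory backend follows; the constructor name comes from the `run` doc example further down, while the module paths, URL, and async context are assumptions for illustration only:

use websurfx::cache::cacher::Cache;                         // assumed public path
use websurfx::results::aggregation_models::SearchResults;   // assumed public path

async fn demo() -> Result<(), Box<dyn std::error::Error>> {
    let mut cache = Cache::new_in_memory();
    let results = SearchResults::default();
    // Store the structure under the request URL used as the cache key...
    cache.cache_results(results.clone(), "http://127.0.0.1:8080/search?q=rust").await?;
    // ...and read it back; the in-memory backend involves no serde round trip.
    let _hit: SearchResults = cache.cached_json("http://127.0.0.1:8080/search?q=rust").await?;
    Ok(())
}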
src/cache/error.rs (vendored) · 4 changes
@@ -12,6 +12,7 @@ pub enum PoolError {
     /// This variant handles the errors which occurs when all the connections
     /// in the connection pool return a connection dropped redis error.
     PoolExhaustionWithConnectionDropError,
+    SerializationError,
     MissingValue,
 }
 
@@ -34,6 +35,9 @@ impl fmt::Display for PoolError {
             PoolError::MissingValue => {
                 write!(f, "The value is missing from the cache")
             }
+            PoolError::SerializationError => {
+                write!(f, "Unable to serialize, deserialize from the cache")
+            }
         }
     }
 }
@@ -44,7 +44,8 @@ use handler::paths::{file_path, FileType};
 ///
 /// let config = Config::parse(true).unwrap();
 /// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address");
-/// let server = run(listener,config).expect("Failed to start server");
+/// let cache = Cache::new_in_memory();
+/// let server = run(listener,config,cache).expect("Failed to start server");
 /// ```
 pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Result<Server> {
     let mut handlebars: Handlebars<'_> = Handlebars::new();
@@ -102,7 +102,7 @@ impl EngineErrorInfo {
 /// A named struct to store, serialize, deserialize the all the search results scraped and
 /// aggregated from the upstream search engines.
 /// `SearchResult` structs.
-#[derive(Serialize, Deserialize, Default)]
+#[derive(Serialize, Deserialize, Default, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResults {
     /// Stores the individual serializable `SearchResult` struct into a vector of
@@ -192,12 +192,12 @@ async fn results(
     safe_search: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     // fetch the cached results json.
-    let cached_results_json = cache.cached_json(&url).await.ok();
+    let cached_results = cache.cached_json(&url).await;
     // check if fetched cache results was indeed fetched or it was an error and if so
     // handle the data accordingly.
-    match cached_results_json {
-        Some(results) => Ok(serde_json::from_str::<SearchResults>(&results)?),
-        None => {
+    match cached_results {
+        Ok(results) => Ok(results),
+        Err(_) => {
             if safe_search == 4 {
                 let mut results: SearchResults = SearchResults::default();
                 let mut _flag: bool =
@@ -208,9 +208,7 @@ async fn results(
                     results.set_disallowed();
                     results.add_style(&config.style);
                     results.set_page_query(query);
-                    cache
-                        .cache_results(serde_json::to_string(&results)?, &url)
-                        .await?;
+                    cache.cache_results(results.clone(), &url).await?;
                     return Ok(results);
                 }
             }
@@ -258,8 +256,7 @@ async fn results(
                 results.set_filtered();
             }
             results.add_style(&config.style);
-            let json_results = serde_json::to_string(&results)?;
-            cache.cache_results(json_results, &url).await?;
+            cache.cache_results(results.clone(), &url).await?;
             Ok(results)
         }
     }
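
One detail worth calling out: the `Clone` derive added to `SearchResults` above is what allows the handler to cache a copy while still returning the original (`cache.cache_results(results.clone(), &url)` followed by `Ok(results)`), and the in-memory Moka cache hands back clones of stored values on `get`. The new read path in `results()` boils down to roughly the shape below; `fetch_fresh` is a hypothetical stand-in for the aggregation and filtering the real handler performs on a miss, and `Cache`/`SearchResults` use the same assumed paths as the earlier sketch:

async fn cached_or_fresh(
    cache: &mut Cache,
    url: &str,
    fetch_fresh: impl std::future::Future<Output = SearchResults>,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
    match cache.cached_json(url).await {
        // Cache hit: the struct comes back as-is, with no serde_json::from_str step.
        Ok(results) => Ok(results),
        // Miss (or a Redis/serde failure): build fresh results, cache a clone,
        // and return the original.
        Err(_) => {
            let results = fetch_fresh.await;
            cache.cache_results(results.clone(), url).await?;
            Ok(results)
        }
    }
}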