mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-22 05:58:21 -05:00

Make the cache types compile-time feature flags too, and make caching more configurable!

Author: Zsombor Gegesy
Date:   2023-09-12 22:14:54 +02:00
parent 320f5f4720
commit 76795c43cc
7 changed files with 56 additions and 15 deletions

Cargo.toml

@@ -20,7 +20,7 @@ fake-useragent = {version="0.1.3"}
 env_logger = {version="0.10.0"}
 log = {version="0.4.20"}
 mlua = {version="0.8.10", features=["luajit"]}
-redis = {version="0.23.3", features=["tokio-comp","connection-manager"]}
+redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true}
 md5 = {version="0.7.0"}
 rand={version="0.8.5"}
 once_cell = {version="1.18.0"}
@@ -33,7 +33,7 @@ dhat = {version="0.3.2", optional = true}
 mimalloc = { version = "0.1.38", default-features = false }
 async-once-cell = {version="0.5.3"}
 actix-governor = {version="0.4.1"}
-mini-moka = "0.10"
+mini-moka = { version="0.10", optional = true}
 
 [dev-dependencies]
 rusty-hook = "^0.11.2"
@@ -67,4 +67,7 @@ rpath = false
 strip = "debuginfo"
 
 [features]
+default = ["in_memory_cache", "redis"]
 dhat-heap = ["dep:dhat"]
+in_memory_cache = ["dep:mini-moka"]
+redis = ["dep:redis"]

src/bin/websurfx.rs

@@ -5,9 +5,7 @@
 
 use mimalloc::MiMalloc;
 use std::net::TcpListener;
-use websurfx::{
-    cache::cacher::Cache, cache::redis_cacher::RedisCache, config::parser::Config, run,
-};
+use websurfx::{cache::cacher::Cache, config::parser::Config, run};
 
 /// A dhat heap memory profiler
 #[cfg(feature = "dhat-heap")]
@@ -32,14 +30,8 @@ async fn main() -> std::io::Result<()> {
     // Initialize the parsed config file.
     let config = Config::parse(false).unwrap();
 
-    let cache = match &config.redis_url {
-        Some(url) => Cache::new(
-            RedisCache::new(url, 5)
-                .await
-                .expect("Redis cache configured"),
-        ),
-        None => Cache::new_in_memory(),
-    };
+    let cache = Cache::build(&config).await;
 
     log::info!(
         "started server on port {} and IP {}",

src/cache/cacher.rs

@@ -2,30 +2,59 @@
 //! from the upstream search engines in a json format.
 
 use error_stack::Report;
+#[cfg(feature = "in_memory_cache")]
 use mini_moka::sync::Cache as MokaCache;
 use std::time::Duration;
 use tokio::sync::Mutex;
 
-use crate::results::aggregation_models::SearchResults;
+use crate::{config::parser::Config, results::aggregation_models::SearchResults};
 
-use super::{error::PoolError, redis_cacher::RedisCache};
+use super::error::PoolError;
+#[cfg(feature = "redis")]
+use super::redis_cacher::RedisCache;
 
 /// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
 #[derive(Clone)]
 pub enum Cache {
+    /// Caching is disabled
+    Disabled,
+    #[cfg(feature = "redis")]
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
+    #[cfg(feature = "in_memory_cache")]
     /// Contains the in-memory cache.
     InMemory(MokaCache<String, SearchResults>),
 }
 
 impl Cache {
+    /// Builds the cache from the given configuration.
+    pub async fn build(config: &Config) -> Self {
+        #[cfg(feature = "redis")]
+        if let Some(url) = &config.redis_url {
+            log::info!("Using Redis running at {} for caching", &url);
+            return Cache::new(
+                RedisCache::new(url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            );
+        }
+        #[cfg(feature = "in_memory_cache")]
+        if config.in_memory_cache {
+            log::info!("Using an in-memory cache");
+            return Cache::new_in_memory();
+        }
+        log::info!("Caching is disabled");
+        Cache::Disabled
+    }
+
     /// Creates a new cache, which wraps the given RedisCache.
+    #[cfg(feature = "redis")]
     pub fn new(redis_cache: RedisCache) -> Self {
         Cache::Redis(redis_cache)
     }
 
     /// Creates an in-memory cache
+    #[cfg(feature = "in_memory_cache")]
     pub fn new_in_memory() -> Self {
         let cache = MokaCache::builder()
             .max_capacity(1000)
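
`Cache::build` picks the backend in a fixed order: a configured `redis_url` wins (when the `redis` feature is compiled in), otherwise `in_memory_cache = true` selects the mini-moka cache, otherwise caching is disabled. A rough sketch of that precedence, with the real `Config` replaced by two plain parameters for illustration:

    // Illustration of the selection order only, not the crate's implementation.
    fn chosen_backend(redis_url: Option<&str>, in_memory_cache: bool) -> &'static str {
        if redis_url.is_some() {
            "redis" // a Redis URL always takes priority
        } else if in_memory_cache {
            "in-memory"
        } else {
            "disabled"
        }
    }

    fn main() {
        assert_eq!(chosen_backend(Some("redis://127.0.0.1:8082"), true), "redis");
        assert_eq!(chosen_backend(None, true), "in-memory");
        assert_eq!(chosen_backend(None, false), "disabled");
    }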
@@ -41,11 +70,14 @@ impl Cache {
     /// * `url` - It takes an url as a string.
     pub async fn cached_json(&mut self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         match self {
+            Cache::Disabled => Err(Report::new(PoolError::MissingValue)),
+            #[cfg(feature = "redis")]
             Cache::Redis(redis_cache) => {
                 let json = redis_cache.cached_json(url).await?;
                 Ok(serde_json::from_str::<SearchResults>(&json)
                     .map_err(|_| PoolError::SerializationError)?)
             }
+            #[cfg(feature = "in_memory_cache")]
             Cache::InMemory(in_memory) => match in_memory.get(&url.to_string()) {
                 Some(res) => Ok(res),
                 None => Err(Report::new(PoolError::MissingValue)),
@@ -66,11 +98,14 @@ impl Cache {
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         match self {
+            Cache::Disabled => Ok(()),
+            #[cfg(feature = "redis")]
             Cache::Redis(redis_cache) => {
                 let json = serde_json::to_string(search_results)
                     .map_err(|_| PoolError::SerializationError)?;
                 redis_cache.cache_results(&json, url).await
             }
+            #[cfg(feature = "in_memory_cache")]
             Cache::InMemory(cache) => {
                 cache.insert(url.to_string(), search_results.clone());
                 Ok(())
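
For the `Disabled` variant, a read always reports `PoolError::MissingValue` and a write succeeds as a no-op, so callers keep a single code path whether or not caching is compiled in or configured. A standalone sketch of that pattern with simplified types (none of these names come from the crate):

    use std::collections::HashMap;

    // Simplified stand-in for the cache enum: the disabled backend misses
    // on every read and silently accepts every write.
    enum SketchCache {
        Disabled,
        InMemory(HashMap<String, String>),
    }

    impl SketchCache {
        fn get(&self, key: &str) -> Option<String> {
            match self {
                SketchCache::Disabled => None,
                SketchCache::InMemory(map) => map.get(key).cloned(),
            }
        }

        fn put(&mut self, key: &str, value: &str) {
            match self {
                SketchCache::Disabled => {} // write is a no-op
                SketchCache::InMemory(map) => {
                    map.insert(key.to_owned(), value.to_owned());
                }
            }
        }
    }

    fn main() {
        let mut cache = SketchCache::Disabled;
        cache.put("https://example.org/search?q=rust", "{}");
        assert!(cache.get("https://example.org/search?q=rust").is_none());
    }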

src/cache/error.rs

@@ -2,12 +2,14 @@
 //! the redis server using an async connection pool.
 use std::fmt;
 
+#[cfg(feature = "redis")]
 use redis::RedisError;
 
 /// A custom error type used for handling redis async pool associated errors.
 #[derive(Debug)]
 pub enum PoolError {
     /// This variant handles all errors related to `RedisError`,
+    #[cfg(feature = "redis")]
     RedisError(RedisError),
     /// This variant handles the errors which occurs when all the connections
     /// in the connection pool return a connection dropped redis error.
@@ -19,6 +21,7 @@ pub enum PoolError {
 impl fmt::Display for PoolError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
+            #[cfg(feature = "redis")]
             PoolError::RedisError(redis_error) => {
                 if let Some(detail) = redis_error.detail() {
                     write!(f, "{}", detail)
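
Because the `RedisError` variant only exists when the `redis` feature is enabled, every `match` arm that names it needs the same `cfg` attribute, otherwise a build without the feature fails on an unknown variant. A minimal illustration (the enum and function here are made up for this note):

    enum Backend {
        #[cfg(feature = "redis")]
        Redis,
        InMemory,
    }

    fn label(backend: &Backend) -> &'static str {
        match backend {
            // The arm must be gated exactly like the variant it matches.
            #[cfg(feature = "redis")]
            Backend::Redis => "redis",
            Backend::InMemory => "in-memory",
        }
    }

    fn main() {
        println!("{}", label(&Backend::InMemory));
    }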

src/cache/mod.rs

@@ -3,4 +3,5 @@
 
 pub mod cacher;
 pub mod error;
+#[cfg(feature = "redis")]
 pub mod redis_cacher;

src/config/parser.rs

@@ -20,6 +20,8 @@ pub struct Config {
     /// It stores the redis connection url address on which the redis
     /// client should connect.
     pub redis_url: Option<String>,
+    /// enable/disable the in-memory cache. Only checked, when no redis_url is provided.
+    pub in_memory_cache: bool,
     /// It stores the option to whether enable or disable production use.
     pub aggregator: AggregatorConfig,
     /// It stores the option to whether enable or disable logs.
@@ -100,6 +102,10 @@ impl Config {
                 globals.get::<_, String>("colorscheme")?,
             ),
             redis_url: globals.get::<_, String>("redis_url").ok(),
+            in_memory_cache: globals
+                .get::<_, bool>("in_memory_cache")
+                .ok()
+                .unwrap_or(false),
             aggregator: AggregatorConfig {
                 random_delay: globals.get::<_, bool>("production_use")?,
             },
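
The `.ok().unwrap_or(false)` chain means a config file that never mentions `in_memory_cache` still parses, with the flag ending up off, so older configs that only set `redis_url` keep working. A hedged sketch of that behaviour against a bare Lua snippet, using the same mlua 0.8-style `get::<_, bool>` call the patch uses (assumes mlua is available as in Cargo.toml):

    use mlua::Lua;

    fn main() -> mlua::Result<()> {
        let lua = Lua::new();
        // A config that only sets redis_url, as older files would.
        lua.load(r#"redis_url = "redis://127.0.0.1:8082""#).exec()?;

        let globals = lua.globals();
        let in_memory_cache = globals
            .get::<_, bool>("in_memory_cache")
            .ok()
            .unwrap_or(false);

        // Either way the missing key resolves to false here.
        assert!(!in_memory_cache);
        Ok(())
    }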

websurfx/config.lua

@@ -47,6 +47,7 @@ theme = "simple" -- the theme name which should be used for the website
 
 -- ### Caching ###
 redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+in_memory_cache = true
 
 -- ### Search Engines ###
 upstream_search_engines = {