Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-21 13:38:21 -05:00)
Merge pull request #245 from neon-mmd/fix-hybrid-cache-implementation

🛠️ Proper working implementation for the `hybrid` caching feature

This commit is contained in: commit 79034eac6e
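In short, the change makes the cache backend a compile-time decision: `src/cache/cacher.rs` now exposes a `Hybrid` variant that is only compiled when both the `redis-cache` and `memory-cache` features are enabled, and that variant prefers Redis but falls back to the in-memory Moka cache when Redis is unavailable. A condensed sketch of the enum introduced by the diff below:

pub enum Cache {
    /// Caching is disabled
    Disabled,
    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
    Redis(RedisCache),
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
    InMemory(MokaCache<String, SearchResults>),
    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
    Hybrid(RedisCache, MokaCache<String, SearchResults>),
}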
Cargo.lock (generated, 38 lines changed)
@@ -115,7 +115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
 dependencies = [
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -228,7 +228,7 @@ dependencies = [
  "actix-router",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -326,7 +326,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -863,7 +863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
 dependencies = [
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -1218,7 +1218,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2093,7 +2093,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2212,7 +2212,7 @@ dependencies = [
  "pest_meta",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2314,7 +2314,7 @@ dependencies = [
  "phf_shared 0.11.2",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2361,7 +2361,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3005,7 +3005,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3262,9 +3262,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.33"
+version = "2.0.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9caece70c63bfba29ec2fed841a09851b14a235c60010fa4de58089b6c025668"
+checksum = "91e02e55d62894af2a08aca894c6577281f76769ba47c94d5756bec8ac6e7373"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
@@ -3349,7 +3349,7 @@ checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3509,7 +3509,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3678,9 +3678,9 @@ dependencies = [

 [[package]]
 name = "typenum"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
+checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"

 [[package]]
 name = "ucd-trie"
@@ -3848,7 +3848,7 @@ dependencies = [
  "once_cell",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
  "wasm-bindgen-shared",
 ]

@@ -3882,7 +3882,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.33",
+ "syn 2.0.36",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -3905,7 +3905,7 @@ dependencies = [

 [[package]]
 name = "websurfx"
-version = "0.20.11"
+version = "0.21.1"
 dependencies = [
  "actix-cors",
  "actix-files",
Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "0.20.11"
+version = "0.21.1"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -71,4 +71,3 @@ default = ["memory-cache"]
 dhat-heap = ["dep:dhat"]
 memory-cache = ["dep:mini-moka"]
 redis-cache = ["dep:redis"]
-hybrid-cache = ["memory-cache", "redis-cache"]
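With the dedicated `hybrid-cache` feature removed, hybrid caching is now selected simply by enabling both remaining cache features together (for example `cargo build --features "redis-cache memory-cache"`, shown here as an assumed invocation). A minimal sketch of how the mutually exclusive `cfg` guards used throughout this PR resolve to exactly one mode:

// Exactly one of these compiles, depending on the enabled cache features.
#[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
fn cache_mode() -> &'static str { "hybrid" }

#[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
fn cache_mode() -> &'static str { "redis" }

#[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
fn cache_mode() -> &'static str { "in-memory" }

#[cfg(not(any(feature = "redis-cache", feature = "memory-cache")))]
fn cache_mode() -> &'static str { "disabled" }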
src/cache/cacher.rs (194 lines changed)
@@ -10,7 +10,7 @@ use tokio::sync::Mutex;

 use crate::{config::parser::Config, models::aggregation_models::SearchResults};

-use super::error::PoolError;
+use super::error::CacheError;
 #[cfg(feature = "redis-cache")]
 use super::redis_cacher::RedisCache;

@@ -19,46 +19,80 @@ use super::redis_cacher::RedisCache;
 pub enum Cache {
     /// Caching is disabled
     Disabled,
-    #[cfg(feature = "redis-cache")]
+    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
-    #[cfg(feature = "memory-cache")]
+    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
     /// Contains the in-memory cache.
     InMemory(MokaCache<String, SearchResults>),
+    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+    /// Contains both the in-memory cache and Redis based cache
+    Hybrid(RedisCache, MokaCache<String, SearchResults>),
 }

 impl Cache {
-    /// Builds the cache from the given configuration.
+    /// A function that builds the cache from the given configuration.
+    ///
+    /// # Arguments
+    ///
+    /// * `config` - It takes the config struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a newly initialized variant based on the feature enabled by the user.
     pub async fn build(_config: &Config) -> Self {
-        #[cfg(feature = "redis-cache")]
+        #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
-        if let Some(url) = &_config.redis_url {
+        {
-            log::info!("Using Redis running at {} for caching", &url);
+            log::info!("Using a hybrid cache");
-            return Cache::new(
+            Cache::new_hybrid(
-                RedisCache::new(url, 5)
+                RedisCache::new(&_config.redis_url, 5)
                     .await
                     .expect("Redis cache configured"),
-            );
+            )
         }
-        #[cfg(feature = "memory-cache")]
+        #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
+        {
+            log::info!("Listening redis server on {}", &_config.redis_url);
+            Cache::new(
+                RedisCache::new(&_config.redis_url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            )
+        }
+        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
         {
             log::info!("Using an in-memory cache");
-            return Cache::new_in_memory();
+            Cache::new_in_memory()
         }
-        #[cfg(not(feature = "memory-cache"))]
+        #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
         {
             log::info!("Caching is disabled");
             Cache::Disabled
         }
     }

-    /// Creates a new cache, which wraps the given RedisCache.
+    /// A function that initializes a new connection pool struct.
-    #[cfg(feature = "redis-cache")]
+    ///
+    /// # Arguments
+    ///
+    /// * `redis_cache` - It takes the newly initialized connection pool struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a `Redis` variant with the newly initialized connection pool struct.
+    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
     pub fn new(redis_cache: RedisCache) -> Self {
         Cache::Redis(redis_cache)
     }

-    /// Creates an in-memory cache
+    /// A function that initializes the `in memory` cache which is used to cache the results in
-    #[cfg(feature = "memory-cache")]
+    /// memory with the search engine thus improving performance by making retrieval and caching of
+    /// results faster.
+    ///
+    /// # Returns
+    ///
+    /// It returns a `InMemory` variant with the newly initialized in memory cache type.
+    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
     pub fn new_in_memory() -> Self {
         let cache = MokaCache::builder()
             .max_capacity(1000)
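A small usage sketch, assuming the crate paths visible in this diff and in the test change below (`websurfx::cache::cacher`), of how the rebuilt `Cache::build` is meant to be consumed:

use websurfx::cache::cacher::{Cache, SharedCache};
use websurfx::config::parser::Config; // module path taken from the imports in this diff

async fn init_cache(config: &Config) -> SharedCache {
    // Only one branch of `build` is compiled in, so this returns the variant
    // matching the enabled cache features (Hybrid, Redis, InMemory or Disabled).
    let cache: Cache = Cache::build(config).await;
    SharedCache::new(cache)
}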
@@ -67,24 +101,60 @@ impl Cache {
         Cache::InMemory(cache)
     }

+    /// A function that initializes both in memory cache and redis client connection for being used
+    /// for managing hybrid cache which increases resiliancy of the search engine by allowing the
+    /// cache to switch to `in memory` caching if the `redis` cache server is temporarily
+    /// unavailable.
+    ///
+    /// # Arguments
+    ///
+    /// * `redis_cache` - It takes `redis` client connection struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a tuple variant `Hybrid` storing both the in-memory cache type and the `redis`
+    /// client connection struct.
+    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+    pub fn new_hybrid(redis_cache: RedisCache) -> Self {
+        let cache = MokaCache::builder()
+            .max_capacity(1000)
+            .time_to_live(Duration::from_secs(60))
+            .build();
+        Cache::Hybrid(redis_cache, cache)
+    }

     /// A function which fetches the cached json results as json string.
     ///
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub async fn cached_json(&mut self, url: &str) -> Result<SearchResults, Report<PoolError>> {
+    ///
+    /// # Error
+    ///
+    /// Returns the `SearchResults` from the cache if the program executes normally otherwise
+    /// returns a `CacheError` if the results cannot be retrieved from the cache.
+    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
         match self {
-            Cache::Disabled => Err(Report::new(PoolError::MissingValue)),
+            Cache::Disabled => Err(Report::new(CacheError::MissingValue)),
-            #[cfg(feature = "redis-cache")]
+            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
-                let json = redis_cache.cached_json(url).await?;
+                let json = redis_cache.cached_json(_url).await?;
                 Ok(serde_json::from_str::<SearchResults>(&json)
-                    .map_err(|_| PoolError::SerializationError)?)
+                    .map_err(|_| CacheError::SerializationError)?)
             }
-            #[cfg(feature = "memory-cache")]
+            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
-            Cache::InMemory(in_memory) => match in_memory.get(&url.to_string()) {
+            Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) {
                 Some(res) => Ok(res),
-                None => Err(Report::new(PoolError::MissingValue)),
+                None => Err(Report::new(CacheError::MissingValue)),
+            },
+            #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+            Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await {
+                Ok(res) => Ok(serde_json::from_str::<SearchResults>(&res)
+                    .map_err(|_| CacheError::SerializationError)?),
+                Err(_) => match in_memory.get(&_url.to_string()) {
+                    Some(res) => Ok(res),
+                    None => Err(Report::new(CacheError::MissingValue)),
+                },
             },
         }
     }
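The `Hybrid` arm above is a plain primary-with-fallback read; a generic sketch of that pattern (not part of the PR, shown only for clarity):

// Try the primary store first; only consult the secondary store when the
// primary fails, and report a miss if neither has the value.
async fn read_with_fallback<T, E>(
    primary: impl std::future::Future<Output = Result<T, E>>,
    secondary: impl FnOnce() -> Option<T>,
    miss: E,
) -> Result<T, E> {
    match primary.await {
        Ok(value) => Ok(value),
        Err(_) => secondary().ok_or(miss),
    }
}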
@@ -96,24 +166,42 @@ impl Cache {
     ///
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
+    ///
+    /// # Error
+    ///
+    /// Returns a unit type if the program caches the given search results without a failure
+    /// otherwise it returns a `CacheError` if the search results cannot be cached due to a
+    /// failure.
     pub async fn cache_results(
         &mut self,
-        search_results: &SearchResults,
+        _search_results: &SearchResults,
-        url: &str,
+        _url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         match self {
             Cache::Disabled => Ok(()),
-            #[cfg(feature = "redis-cache")]
+            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
-                let json = serde_json::to_string(search_results)
+                let json = serde_json::to_string(_search_results)
-                    .map_err(|_| PoolError::SerializationError)?;
+                    .map_err(|_| CacheError::SerializationError)?;
-                redis_cache.cache_results(&json, url).await
+                redis_cache.cache_results(&json, _url).await
             }
-            #[cfg(feature = "memory-cache")]
+            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
             Cache::InMemory(cache) => {
-                cache.insert(url.to_string(), search_results.clone());
+                cache.insert(_url.to_string(), _search_results.clone());
                 Ok(())
             }
+            #[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
+            Cache::Hybrid(redis_cache, cache) => {
+                let json = serde_json::to_string(_search_results)
+                    .map_err(|_| CacheError::SerializationError)?;
+                match redis_cache.cache_results(&json, _url).await {
+                    Ok(_) => Ok(()),
+                    Err(_) => {
+                        cache.insert(_url.to_string(), _search_results.clone());
+                        Ok(())
+                    }
+                }
+            }
         }
     }
 }
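On the write side the hybrid arm tries Redis first and silently falls back to the in-memory store. A hedged sketch of how a route handler would typically combine both halves, assuming it lives where these cache types are in scope and using `fetch_results` as a stand-in for websurfx's real aggregation call:

use error_stack::Report;

async fn cached_or_fresh(
    cache: &SharedCache,
    url: &str,
) -> Result<SearchResults, Report<CacheError>> {
    match cache.cached_json(url).await {
        Ok(results) => Ok(results), // cache hit
        Err(_) => {
            // cache miss: compute fresh results, then store them for next time
            let results = fetch_results(url).await; // assumed helper
            cache.cache_results(&results, url).await?;
            Ok(results)
        }
    }
}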
@@ -125,26 +213,54 @@ pub struct SharedCache {
 }

 impl SharedCache {
-    /// Creates a new SharedCache from a Cache implementation
+    /// A function that creates a new `SharedCache` from a Cache implementation.
+    ///
+    /// # Arguments
+    ///
+    /// * `cache` - It takes the `Cache` enum variant as an argument with the prefered cache type.
+    ///
+    /// Returns a newly constructed `SharedCache` struct.
     pub fn new(cache: Cache) -> Self {
         Self {
             cache: Mutex::new(cache),
         }
     }

-    /// A function which retrieves the cached SearchResulsts from the internal cache.
+    /// A getter function which retrieves the cached SearchResulsts from the internal cache.
-    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<PoolError>> {
+    ///
+    /// # Arguments
+    ///
+    /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
+    /// cached results from the cache.
+    ///
+    /// # Error
+    ///
+    /// Returns a `SearchResults` struct containing the search results from the cache if nothing
+    /// goes wrong otherwise returns a `CacheError`.
+    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cached_json(url).await
     }

-    /// A function which caches the results by using the `url` as the key and
+    /// A setter function which caches the results by using the `url` as the key and
     /// `SearchResults` as the value.
+    ///
+    /// # Arguments
+    ///
+    /// * `search_results` - It takes the `SearchResults` as an argument which are results that
+    /// needs to be cached.
+    /// * `url` - It takes the search url as an argument which will be used as the key for storing
+    /// results in the cache.
+    ///
+    /// # Error
+    ///
+    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
+    /// on a failure.
     pub async fn cache_results(
         &self,
         search_results: &SearchResults,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cache_results(search_results, url).await
     }
src/cache/error.rs (14 lines changed)
@@ -7,7 +7,7 @@ use redis::RedisError;

 /// A custom error type used for handling redis async pool associated errors.
 #[derive(Debug)]
-pub enum PoolError {
+pub enum CacheError {
     /// This variant handles all errors related to `RedisError`,
     #[cfg(feature = "redis-cache")]
     RedisError(RedisError),
@@ -20,31 +20,31 @@ pub enum PoolError {
     MissingValue,
 }

-impl fmt::Display for PoolError {
+impl fmt::Display for CacheError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             #[cfg(feature = "redis-cache")]
-            PoolError::RedisError(redis_error) => {
+            CacheError::RedisError(redis_error) => {
                 if let Some(detail) = redis_error.detail() {
                     write!(f, "{}", detail)
                 } else {
                     write!(f, "")
                 }
             }
-            PoolError::PoolExhaustionWithConnectionDropError => {
+            CacheError::PoolExhaustionWithConnectionDropError => {
                 write!(
                     f,
                     "Error all connections from the pool dropped with connection error"
                 )
             }
-            PoolError::MissingValue => {
+            CacheError::MissingValue => {
                 write!(f, "The value is missing from the cache")
             }
-            PoolError::SerializationError => {
+            CacheError::SerializationError => {
                 write!(f, "Unable to serialize, deserialize from the cache")
             }
         }
     }
 }

-impl error_stack::Context for PoolError {}
+impl error_stack::Context for CacheError {}
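Since the enum only changes name, call sites wrap it in an `error_stack::Report` exactly as before; a minimal sketch using the `Display` text defined above (the type import is assumed to match this module):

use error_stack::Report;

fn missing() -> Report<CacheError> {
    // Rendering this report uses the Display impl above and prints
    // "The value is missing from the cache".
    Report::new(CacheError::MissingValue)
}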
src/cache/redis_cacher.rs (29 lines changed)
@@ -6,7 +6,7 @@ use futures::future::try_join_all;
 use md5::compute;
 use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};

-use super::error::PoolError;
+use super::error::CacheError;

 /// A named struct which stores the redis Connection url address to which the client will
 /// connect to.
@@ -29,6 +29,11 @@ impl RedisCache {
     /// * `redis_connection_url` - It takes the redis Connection url address.
     /// * `pool_size` - It takes the size of the connection pool (in other words the number of
     /// connections that should be stored in the pool).
+    ///
+    /// # Error
+    ///
+    /// Returns a newly constructed `RedisCache` struct on success otherwise returns a standard
+    /// error type.
     pub async fn new(
         redis_connection_url: &str,
         pool_size: u8,
@@ -62,7 +67,12 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
+    ///
+    /// # Error
+    ///
+    /// Returns the results as a String from the cache on success otherwise returns a `CacheError`
+    /// on a failure.
+    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);

@@ -85,7 +95,7 @@ impl RedisCache {
                     self.current_connection += 1;
                     if self.current_connection == self.pool_size {
                         return Err(Report::new(
-                            PoolError::PoolExhaustionWithConnectionDropError,
+                            CacheError::PoolExhaustionWithConnectionDropError,
                         ));
                     }
                     result = self.connection_pool[self.current_connection as usize]
@@ -93,7 +103,7 @@ impl RedisCache {
                         .await;
                     continue;
                 }
-                false => return Err(Report::new(PoolError::RedisError(error))),
+                false => return Err(Report::new(CacheError::RedisError(error))),
             },
             Ok(res) => return Ok(res),
         }
@@ -108,11 +118,16 @@ impl RedisCache {
     ///
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
+    ///
+    /// # Error
+    ///
+    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
+    /// on a failure.
     pub async fn cache_results(
         &mut self,
         json_results: &str,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);

@@ -135,7 +150,7 @@ impl RedisCache {
                     self.current_connection += 1;
                     if self.current_connection == self.pool_size {
                         return Err(Report::new(
-                            PoolError::PoolExhaustionWithConnectionDropError,
+                            CacheError::PoolExhaustionWithConnectionDropError,
                         ));
                     }
                     result = self.connection_pool[self.current_connection as usize]
@@ -143,7 +158,7 @@ impl RedisCache {
                         .await;
                     continue;
                 }
-                false => return Err(Report::new(PoolError::RedisError(error))),
+                false => return Err(Report::new(CacheError::RedisError(error))),
             },
             Ok(_) => return Ok(()),
         }
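Both methods key their Redis entries by `self.hash_url(url)`; given the `use md5::compute` import at the top of this file, a plausible sketch of that hashing step (the real helper body is not shown in this diff):

// Hash the request URL into a fixed-length hex key for Redis.
fn hash_url(url: &str) -> String {
    format!("{:x}", md5::compute(url))
}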
@@ -17,9 +17,10 @@ pub struct Config {
     pub binding_ip: String,
     /// It stores the theming options for the website.
     pub style: Style,
+    #[cfg(feature = "redis-cache")]
     /// It stores the redis connection url address on which the redis
     /// client should connect.
-    pub redis_url: Option<String>,
+    pub redis_url: String,
     /// It stores the option to whether enable or disable production use.
     pub aggregator: AggregatorConfig,
     /// It stores the option to whether enable or disable logs.
@@ -99,7 +100,8 @@ impl Config {
                 globals.get::<_, String>("theme")?,
                 globals.get::<_, String>("colorscheme")?,
             ),
-            redis_url: globals.get::<_, String>("redis_url").ok(),
+            #[cfg(feature = "redis-cache")]
+            redis_url: globals.get::<_, String>("redis_url")?,
             aggregator: AggregatorConfig {
                 random_delay: globals.get::<_, bool>("production_use")?,
             },
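Because `redis_url` now exists on `Config` only when the `redis-cache` feature is enabled, and is a required `String` rather than an `Option`, any code that reads it has to be gated the same way; a minimal sketch:

#[cfg(feature = "redis-cache")]
fn log_redis_target(config: &Config) {
    // Compiles only when the field itself exists.
    log::info!("Connecting to redis at {}", &config.redis_url);
}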
@@ -11,16 +11,19 @@ pub struct SearchParams {
     /// It stores the search parameter `page` (or pageno in simple words)
     /// of the search url.
     pub page: Option<u32>,
+    /// It stores the search parameter `safesearch` (or safe search level in simple words) of the
+    /// search url.
+    pub safesearch: Option<u8>,
 }

 /// A named struct which is used to deserialize the cookies fetched from the client side.
 #[allow(dead_code)]
 #[derive(Deserialize)]
-pub struct Cookie {
+pub struct Cookie<'a> {
     /// It stores the theme name used in the website.
-    pub theme: String,
+    pub theme: &'a str,
     /// It stores the colorscheme name used for the website theme.
-    pub colorscheme: String,
+    pub colorscheme: &'a str,
     /// It stores the user selected upstream search engines selected from the UI.
-    pub engines: Vec<String>,
+    pub engines: Vec<&'a str>,
 }
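Switching `Cookie` to borrowed `&'a str` fields allows zero-copy deserialization straight from the raw cookie value; a sketch assuming `serde_json` is used for parsing, as elsewhere in the crate:

fn parse_cookie(raw: &str) -> Option<Cookie<'_>> {
    // The borrowed fields point into `raw`, so no strings are copied
    // (serde_json can only borrow when the input needs no unescaping).
    serde_json::from_str::<Cookie>(raw).ok()
}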
@@ -4,43 +4,22 @@ use crate::{
     cache::cacher::SharedCache,
     config::parser::Config,
     handler::paths::{file_path, FileType},
-    models::{aggregation_models::SearchResults, engine_models::EngineHandler},
+    models::{
+        aggregation_models::SearchResults,
+        engine_models::EngineHandler,
+        server_models::{Cookie, SearchParams},
+    },
     results::aggregator::aggregate,
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
 use handlebars::Handlebars;
 use regex::Regex;
-use serde::Deserialize;
 use std::{
-    fs::{read_to_string, File},
+    fs::File,
     io::{BufRead, BufReader, Read},
 };
 use tokio::join;

-/// A named struct which deserializes all the user provided search parameters and stores them.
-#[derive(Deserialize)]
-pub struct SearchParams {
-    /// It stores the search parameter option `q` (or query in simple words)
-    /// of the search url.
-    q: Option<String>,
-    /// It stores the search parameter `page` (or pageno in simple words)
-    /// of the search url.
-    page: Option<u32>,
-    /// It stores the search parameter `safesearch` (or safe search level in simple words) of the
-    /// search url.
-    safesearch: Option<u8>,
-}
-
-/// Handles the route of index page or main page of the `websurfx` meta search engine website.
-#[get("/")]
-pub async fn index(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("index", &config.style).unwrap();
-    Ok(HttpResponse::Ok().body(page_content))
-}

 /// Handles the route of any other accessed route/page which is not provided by the
 /// website essentially the 404 error page.
 pub async fn not_found(
@@ -54,18 +33,6 @@ pub async fn not_found(
         .body(page_content))
 }

-/// A named struct which is used to deserialize the cookies fetched from the client side.
-#[allow(dead_code)]
-#[derive(Deserialize)]
-struct Cookie<'a> {
-    /// It stores the theme name used in the website.
-    theme: &'a str,
-    /// It stores the colorscheme name used for the website theme.
-    colorscheme: &'a str,
-    /// It stores the user selected upstream search engines selected from the UI.
-    engines: Vec<&'a str>,
-}
-
 /// Handles the route of search page of the `websurfx` meta search engine website and it takes
 /// two search url parameters `q` and `page` where `page` parameter is optional.
 ///
@@ -264,6 +231,16 @@ async fn results(

 /// A helper function which checks whether the search query contains any keywords which should be
 /// disallowed/allowed based on the regex based rules present in the blocklist and allowlist files.
+///
+/// # Arguments
+///
+/// * `file_path` - It takes the file path of the list as the argument.
+/// * `query` - It takes the search query to be checked against the list as an argument.
+///
+/// # Error
+///
+/// Returns a bool indicating whether the results were found in the list or not on success
+/// otherwise returns a standard error type on a failure.
 fn is_match_from_filter_list(
     file_path: &str,
     query: &str,
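The newly documented `is_match_from_filter_list` treats every line of the list file as a regex rule and tests the query against each one; a self-contained sketch of that shape (an assumption, since the function body is not part of this hunk):

use regex::Regex;
use std::fs::File;
use std::io::{BufRead, BufReader};

fn matches_any_rule(file_path: &str, query: &str) -> Result<bool, Box<dyn std::error::Error>> {
    let reader = BufReader::new(File::open(file_path)?);
    for line in reader.lines() {
        // Each non-matching rule is skipped; the first match short-circuits.
        if Regex::new(&line?)?.is_match(query) {
            return Ok(true);
        }
    }
    Ok(false)
}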
@@ -279,33 +256,3 @@ fn is_match_from_filter_list(
     }
     Ok(flag)
 }
-
-/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
-#[get("/robots.txt")]
-pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String =
-        read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
-    Ok(HttpResponse::Ok()
-        .content_type("text/plain; charset=ascii")
-        .body(page_content))
-}
-
-/// Handles the route of about page of the `websurfx` meta search engine website.
-#[get("/about")]
-pub async fn about(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("about", &config.style)?;
-    Ok(HttpResponse::Ok().body(page_content))
-}
-
-/// Handles the route of settings page of the `websurfx` meta search engine website.
-#[get("/settings")]
-pub async fn settings(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("settings", &config.style)?;
-    Ok(HttpResponse::Ok().body(page_content))
-}
@@ -12,6 +12,7 @@ fn spawn_app() -> String {
     let server = run(
         listener,
         config,
+        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
        websurfx::cache::cacher::Cache::new_in_memory(),
     )
     .expect("Failed to bind address");
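The interesting bit here is that a `#[cfg(...)]` attribute can gate an individual call argument, so the in-memory cache is only passed to `run` when that feature combination is active; a minimal self-contained sketch of the same trick (not websurfx's real signatures):

struct InMemoryCache;

fn run(
    port: u16,
    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] _cache: InMemoryCache,
) {
    println!("listening on {port}");
}

fn main() {
    run(
        8080,
        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
        InMemoryCache,
    );
}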