Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-21)

Merge pull request #194 from neon-mmd/optimize-and-make-code-idiomatic-2

⚙️ Optimize and make code more idiomatic (part - I)

Commit 9cded973f0
Cargo.lock (generated), 181 changed lines
@@ -288,6 +288,12 @@ version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"

[[package]]
name = "arc-swap"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"

[[package]]
name = "askama_escape"
version = "0.10.3"
@@ -559,7 +565,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4"
dependencies = [
 "bytes 1.4.0",
 "futures-core",
 "memchr",
 "pin-project-lite",
 "tokio 1.32.0",
 "tokio-util",
]

[[package]]
@@ -1049,6 +1059,21 @@ version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"

[[package]]
name = "futures"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40"
dependencies = [
 "futures-channel",
 "futures-core",
 "futures-executor",
 "futures-io",
 "futures-sink",
 "futures-task",
 "futures-util",
]

[[package]]
name = "futures-channel"
version = "0.3.28"
@@ -1056,6 +1081,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
dependencies = [
 "futures-core",
 "futures-sink",
]

[[package]]
@@ -1070,10 +1096,38 @@ version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4"
dependencies = [
 "futures",
 "futures 0.1.31",
 "num_cpus",
]

[[package]]
name = "futures-executor"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0"
dependencies = [
 "futures-core",
 "futures-task",
 "futures-util",
]

[[package]]
name = "futures-io"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"

[[package]]
name = "futures-macro"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
 "proc-macro2 1.0.66",
 "quote 1.0.33",
 "syn 2.0.29",
]

[[package]]
name = "futures-sink"
version = "0.3.28"
@@ -1092,10 +1146,16 @@ version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
dependencies = [
 "futures-channel",
 "futures-core",
 "futures-io",
 "futures-macro",
 "futures-sink",
 "futures-task",
 "memchr",
 "pin-project-lite",
 "pin-utils",
 "slab",
]

[[package]]
@@ -1152,7 +1212,7 @@ dependencies = [
 "byteorder",
 "bytes 0.4.12",
 "fnv",
 "futures",
 "futures 0.1.31",
 "http 0.1.21",
 "indexmap",
 "log",
@@ -1270,7 +1330,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "http 0.1.21",
 "tokio-buf",
]
@@ -1317,7 +1377,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c843caf6296fc1f93444735205af9ed4e109a539005abb2564ae1d6fad34c52"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "futures-cpupool",
 "h2 0.1.26",
 "http 0.1.21",
@@ -1371,7 +1431,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a800d6aa50af4b5850b2b0f659625ce9504df908e9733b635720483be26174f"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "hyper 0.12.36",
 "native-tls",
 "tokio-io",
@@ -1721,6 +1781,20 @@ dependencies = [
 "ws2_32-sys",
]

[[package]]
name = "mlua"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bb37b0ba91f017aa7ca2b98ef99496827770cd635b4a932a6047c5b4bbe678e"
dependencies = [
 "bstr",
 "cc",
 "num-traits",
 "once_cell",
 "pkg-config",
 "rustc-hash",
]

[[package]]
name = "native-tls"
version = "0.2.11"
@@ -2076,6 +2150,26 @@ dependencies = [
 "siphasher 0.3.11",
]

[[package]]
name = "pin-project"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
dependencies = [
 "pin-project-internal",
]

[[package]]
name = "pin-project-internal"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
dependencies = [
 "proc-macro2 1.0.66",
 "quote 1.0.33",
 "syn 2.0.29",
]

[[package]]
name = "pin-project-lite"
version = "0.2.13"
@@ -2353,12 +2447,21 @@ version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffd6543a7bc6428396845f6854ccf3d1ae8823816592e2cbe74f20f50f209d02"
dependencies = [
 "arc-swap",
 "async-trait",
 "bytes 1.4.0",
 "combine",
 "futures 0.3.28",
 "futures-util",
 "itoa 1.0.9",
 "percent-encoding 2.3.0",
 "pin-project-lite",
 "ryu",
 "sha1_smol",
 "socket2 0.4.9",
 "tokio 1.32.0",
 "tokio-retry",
 "tokio-util",
 "url 2.4.1",
]

@@ -2418,7 +2521,7 @@ dependencies = [
 "cookie_store",
 "encoding_rs",
 "flate2",
 "futures",
 "futures 0.1.31",
 "http 0.1.21",
 "hyper 0.12.36",
 "hyper-tls 0.3.2",
@@ -2477,36 +2580,18 @@ dependencies = [
 "winreg 0.50.0",
]

[[package]]
name = "rlua"
version = "0.19.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d33e5ba15c3d43178f283ed5863d4531e292fc0e56fb773f3bea45f18e3a42a"
dependencies = [
 "bitflags 1.3.2",
 "bstr",
 "libc",
 "num-traits",
 "rlua-lua54-sys",
]

[[package]]
name = "rlua-lua54-sys"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7aafabafe1895cb4a2be81a56d7ff3d46bf4b5d2f9cfdbea2ed404cdabe96474"
dependencies = [
 "cc",
 "libc",
 "pkg-config",
]

[[package]]
name = "rustc-demangle"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"

[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"

[[package]]
name = "rustc_version"
version = "0.2.3"
@@ -3071,7 +3156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "mio 0.6.23",
 "num_cpus",
 "tokio-current-thread",
@@ -3110,7 +3195,7 @@ checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46"
dependencies = [
 "bytes 0.4.12",
 "either",
 "futures",
 "futures 0.1.31",
]

[[package]]
@@ -3119,7 +3204,7 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e"
dependencies = [
 "futures",
 "futures 0.1.31",
 "tokio-executor",
]

@@ -3130,7 +3215,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671"
dependencies = [
 "crossbeam-utils 0.7.2",
 "futures",
 "futures 0.1.31",
]

[[package]]
@@ -3140,7 +3225,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "log",
]

@@ -3172,7 +3257,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351"
dependencies = [
 "crossbeam-utils 0.7.2",
 "futures",
 "futures 0.1.31",
 "lazy_static",
 "log",
 "mio 0.6.23",
@@ -3184,6 +3269,17 @@ dependencies = [
 "tokio-sync",
]

[[package]]
name = "tokio-retry"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f"
dependencies = [
 "pin-project",
 "rand 0.8.5",
 "tokio 1.32.0",
]

[[package]]
name = "tokio-sync"
version = "0.1.8"
@@ -3191,7 +3287,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee"
dependencies = [
 "fnv",
 "futures",
 "futures 0.1.31",
]

[[package]]
@@ -3201,7 +3297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72"
dependencies = [
 "bytes 0.4.12",
 "futures",
 "futures 0.1.31",
 "iovec",
 "mio 0.6.23",
 "tokio-io",
@@ -3217,7 +3313,7 @@ dependencies = [
 "crossbeam-deque 0.7.4",
 "crossbeam-queue",
 "crossbeam-utils 0.7.2",
 "futures",
 "futures 0.1.31",
 "lazy_static",
 "log",
 "num_cpus",
@@ -3232,7 +3328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296"
dependencies = [
 "crossbeam-utils 0.7.2",
 "futures",
 "futures 0.1.31",
 "slab",
 "tokio-executor",
]
@@ -3427,7 +3523,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230"
dependencies = [
 "futures",
 "futures 0.1.31",
 "log",
 "try-lock",
]
@@ -3541,15 +3637,16 @@ dependencies = [
 "env_logger",
 "error-stack",
 "fake-useragent",
 "futures 0.3.28",
 "handlebars",
 "log",
 "md5",
 "mlua",
 "once_cell",
 "rand 0.8.5",
 "redis",
 "regex",
 "reqwest 0.11.20",
 "rlua",
 "rusty-hook",
 "scraper",
 "serde",
Cargo.toml

@@ -19,14 +19,15 @@ serde_json = {version="1.0.105"}
fake-useragent = {version="0.1.3"}
env_logger = {version="0.10.0"}
log = {version="0.4.20"}
rlua = {version="0.19.7"}
redis = {version="0.23.2"}
mlua = {version="0.8.10",features=["luajit"]}
redis = {version="0.23.2",features=["tokio-comp","connection-manager"]}
md5 = {version="0.7.0"}
rand={version="0.8.5"}
once_cell = {version="1.18.0"}
error-stack = {version="0.4.0"}
async-trait = {version="0.1.73"}
regex = {version="1.9.4", features=["perf"]}
futures = {version="0.3.28"}

[dev-dependencies]
rusty-hook = "^0.11.2"
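The dependency changes above swap `rlua` for `mlua` (with the `luajit` feature) and turn on the `redis` crate's `tokio-comp` and `connection-manager` features. As a rough sketch of what those redis features make available (not code from this PR; the `manager` helper and the local server URL are illustrative assumptions):

use redis::aio::ConnectionManager;

// Open a client and build one async, auto-reconnecting connection handle.
// The cacher module in this PR builds `pool_size` of these up front.
async fn manager(url: &str) -> redis::RedisResult<ConnectionManager> {
    let client = redis::Client::open(url)?;
    client.get_tokio_connection_manager().await
}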
@@ -6,6 +6,11 @@
use std::net::TcpListener;
use websurfx::{config::parser::Config, run};

/// A dhat heap memory profiler
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

/// The function that launches the main server and registers all the routes of the website.
///
/// # Error
@@ -14,6 +19,10 @@ use websurfx::{config::parser::Config, run};
/// available for being used for other applications.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
// A dhat heap profiler initialization.
#[cfg(feature = "dhat-heap")]
let _profiler = dhat::Profiler::new_heap();

// Initialize the parsed config file.
let config = Config::parse(false).unwrap();
src/cache/cacher.rs (vendored), 126 changed lines
@@ -1,17 +1,26 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.

use error_stack::Report;
use futures::future::try_join_all;
use md5::compute;
use redis::{Client, Commands, Connection};
use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};

use super::error::PoolError;

/// A named struct which stores the redis Connection url address to which the client will
/// connect to.
///
/// # Fields
///
/// * `redis_connection_url` - It stores the redis Connection url address.
/// * `connection_pool` - It stores a pool of connections ready to be used.
/// * `pool_size` - It stores the size of the connection pool (in other words the number of
/// connections that should be stored in the pool).
/// * `current_connection` - It stores the index of which connection is being used at the moment.
pub struct RedisCache {
connection: Connection,
connection_pool: Vec<ConnectionManager>,
pool_size: u8,
current_connection: u8,
}

impl RedisCache {
@@ -19,11 +28,25 @@ impl RedisCache {
///
/// # Arguments
///
/// * `redis_connection_url` - It stores the redis Connection url address.
pub fn new(redis_connection_url: String) -> Result<Self, Box<dyn std::error::Error>> {
/// * `redis_connection_url` - It takes the redis Connection url address.
/// * `pool_size` - It takes the size of the connection pool (in other words the number of
/// connections that should be stored in the pool).
pub async fn new(
redis_connection_url: &str,
pool_size: u8,
) -> Result<Self, Box<dyn std::error::Error>> {
let client = Client::open(redis_connection_url)?;
let connection = client.get_connection()?;
let redis_cache = RedisCache { connection };
let mut tasks: Vec<_> = Vec::new();

for _ in 0..pool_size {
tasks.push(client.get_tokio_connection_manager());
}

let redis_cache = RedisCache {
connection_pool: try_join_all(tasks).await?,
pool_size,
current_connection: Default::default(),
};
Ok(redis_cache)
}

@@ -32,7 +55,7 @@ impl RedisCache {
/// # Arguments
///
/// * `url` - It takes an url as string.
fn hash_url(url: &str) -> String {
fn hash_url(&self, url: &str) -> String {
format!("{:?}", compute(url))
}

@@ -41,9 +64,42 @@ impl RedisCache {
/// # Arguments
///
/// * `url` - It takes an url as a string.
pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
let hashed_url_string = Self::hash_url(url);
Ok(self.connection.get(hashed_url_string)?)
pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
self.current_connection = Default::default();
let hashed_url_string: &str = &self.hash_url(url);

let mut result: Result<String, RedisError> = self.connection_pool
[self.current_connection as usize]
.get(hashed_url_string)
.await;

// Code to check whether the current connection being used is dropped with connection error
// or not. if it drops with the connection error then the current connection is replaced
// with a new connection from the pool which is then used to run the redis command then
// that connection is also checked whether it is dropped or not if it is not then the
// result is passed as a `Result` or else the same process repeats again and if all of the
// connections in the pool result in connection drop error then a custom pool error is
// returned.
loop {
match result {
Err(error) => match error.is_connection_dropped() {
true => {
self.current_connection += 1;
if self.current_connection == self.pool_size {
return Err(Report::new(
PoolError::PoolExhaustionWithConnectionDropError,
));
}
result = self.connection_pool[self.current_connection as usize]
.get(hashed_url_string)
.await;
continue;
}
false => return Err(Report::new(PoolError::RedisError(error))),
},
Ok(res) => return Ok(res),
}
}
}

/// A function which caches the results by using the hashed `url` as the key and
@@ -54,21 +110,45 @@ impl RedisCache {
///
/// * `json_results` - It takes the json results string as an argument.
/// * `url` - It takes the url as a String.
pub fn cache_results(
pub async fn cache_results(
&mut self,
json_results: String,
json_results: &str,
url: &str,
) -> Result<(), Box<dyn std::error::Error>> {
let hashed_url_string = Self::hash_url(url);
) -> Result<(), Report<PoolError>> {
self.current_connection = Default::default();
let hashed_url_string: &str = &self.hash_url(url);

// put results_json into cache
self.connection.set(&hashed_url_string, json_results)?;
let mut result: Result<(), RedisError> = self.connection_pool
[self.current_connection as usize]
.set_ex(hashed_url_string, json_results, 60)
.await;

// Set the TTL for the key to 60 seconds
self.connection
.expire::<String, u32>(hashed_url_string, 60)
.unwrap();

Ok(())
// Code to check whether the current connection being used is dropped with connection error
// or not. if it drops with the connection error then the current connection is replaced
// with a new connection from the pool which is then used to run the redis command then
// that connection is also checked whether it is dropped or not if it is not then the
// result is passed as a `Result` or else the same process repeats again and if all of the
// connections in the pool result in connection drop error then a custom pool error is
// returned.
loop {
match result {
Err(error) => match error.is_connection_dropped() {
true => {
self.current_connection += 1;
if self.current_connection == self.pool_size {
return Err(Report::new(
PoolError::PoolExhaustionWithConnectionDropError,
));
}
result = self.connection_pool[self.current_connection as usize]
.set_ex(hashed_url_string, json_results, 60)
.await;
continue;
}
false => return Err(Report::new(PoolError::RedisError(error))),
},
Ok(_) => return Ok(()),
}
}
}
}
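For orientation, here is a rough usage sketch of the reworked cache API above (not code from the PR): the `websurfx::cache::cacher` import path is inferred from the module layout shown here, the pool size of 5 and the example URL are assumptions, and errors are handled loosely for brevity.

use websurfx::cache::cacher::RedisCache;

async fn demo() {
    // Build a pool of 5 `ConnectionManager` handles up front.
    let mut cache = RedisCache::new("redis://127.0.0.1:6379", 5)
        .await
        .expect("failed to build the redis connection pool");

    let url = "https://example.com/search?q=rust";

    // Store a payload under the md5-hashed URL key (60 second TTL).
    cache
        .cache_results("{\"results\":[]}", url)
        .await
        .expect("failed to cache the results");

    // Later lookups reuse the same pool, retrying on dropped connections.
    let cached = cache.cached_json(url).await.expect("cache miss or pool error");
    println!("{cached}");
}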
src/cache/error.rs (vendored, new file), 40 lines
@@ -0,0 +1,40 @@
//! This module provides the error enum to handle different errors associated while requesting data from
//! the redis server using an async connection pool.
use std::fmt;

use redis::RedisError;

/// A custom error type used for handling redis async pool associated errors.
///
/// This enum provides variants three different categories of errors:
/// * `RedisError` - This variant handles all errors related to `RedisError`,
/// * `PoolExhaustionWithConnectionDropError` - This variant handles the error
/// which occurs when all the connections in the connection pool return a connection
/// dropped redis error.
#[derive(Debug)]
pub enum PoolError {
RedisError(RedisError),
PoolExhaustionWithConnectionDropError,
}

impl fmt::Display for PoolError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
PoolError::RedisError(redis_error) => {
if let Some(detail) = redis_error.detail() {
write!(f, "{}", detail)
} else {
write!(f, "")
}
}
PoolError::PoolExhaustionWithConnectionDropError => {
write!(
f,
"Error all connections from the pool dropped with connection error"
)
}
}
}
}

impl error_stack::Context for PoolError {}
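As a small illustration of how this error type is meant to be consumed (mirroring the cacher code above, not an API of its own), a failed redis call can be wrapped into an `error_stack::Report`; the `classify` helper and its `pool_exhausted` flag are hypothetical:

use error_stack::Report;
use redis::RedisError;

// Map a failed redis call onto the two PoolError variants: a dropped
// connection on an exhausted pool becomes the custom pool error, while
// every other failure is surfaced as the wrapped RedisError.
fn classify(err: RedisError, pool_exhausted: bool) -> Report<PoolError> {
    if err.is_connection_dropped() && pool_exhausted {
        Report::new(PoolError::PoolExhaustionWithConnectionDropError)
    } else {
        Report::new(PoolError::RedisError(err))
    }
}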
src/cache/mod.rs (vendored), 1 changed line
@@ -1 +1,2 @@
pub mod cacher;
pub mod error;
@@ -5,7 +5,7 @@ use crate::handler::paths::{file_path, FileType};

use super::parser_models::Style;
use log::LevelFilter;
use rlua::Lua;
use mlua::Lua;
use std::{collections::HashMap, fs, thread::available_parallelism};

/// A named struct which stores the parsed config file options.
@@ -63,53 +63,53 @@ impl Config {
/// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
/// Config struct with all the parsed config options from the parsed config file.
pub fn parse(logging_initialized: bool) -> Result<Self, Box<dyn std::error::Error>> {
Lua::new().context(|context| -> Result<Self, Box<dyn std::error::Error>> {
let globals = context.globals();
let lua = Lua::new();
let globals = lua.globals();

context
.load(&fs::read_to_string(file_path(FileType::Config)?)?)
.exec()?;
lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
.exec()?;

let parsed_threads: u8 = globals.get::<_, u8>("threads")?;
let parsed_threads: u8 = globals.get::<_, u8>("threads")?;

let debug: bool = globals.get::<_, bool>("debug")?;
let logging:bool= globals.get::<_, bool>("logging")?;
let debug: bool = globals.get::<_, bool>("debug")?;
let logging: bool = globals.get::<_, bool>("logging")?;

if !logging_initialized {
set_logging_level(debug, logging);
}
if !logging_initialized {
set_logging_level(debug, logging);
}

let threads: u8 = if parsed_threads == 0 {
let total_num_of_threads: usize = available_parallelism()?.get() / 2;
log::error!("Config Error: The value of `threads` option should be a non zero positive integer");
log::error!("Falling back to using {} threads", total_num_of_threads);
total_num_of_threads as u8
} else {
parsed_threads
};
let threads: u8 = if parsed_threads == 0 {
let total_num_of_threads: usize = available_parallelism()?.get() / 2;
log::error!(
"Config Error: The value of `threads` option should be a non zero positive integer"
);
log::error!("Falling back to using {} threads", total_num_of_threads);
total_num_of_threads as u8
} else {
parsed_threads
};

Ok(Config {
port: globals.get::<_, u16>("port")?,
binding_ip: globals.get::<_, String>("binding_ip")?,
style: Style::new(
globals.get::<_, String>("theme")?,
globals.get::<_, String>("colorscheme")?,
),
redis_url: globals.get::<_, String>("redis_url")?,
aggregator: AggregatorConfig {
random_delay: globals.get::<_, bool>("production_use")?,
},
logging,
debug,
upstream_search_engines: globals
.get::<_, HashMap<String, bool>>("upstream_search_engines")?
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.filter_map(|engine| crate::engines::engine_models::EngineHandler::new(&engine))
.collect(),
request_timeout: globals.get::<_, u8>("request_timeout")?,
threads,
})
Ok(Config {
port: globals.get::<_, u16>("port")?,
binding_ip: globals.get::<_, String>("binding_ip")?,
style: Style::new(
globals.get::<_, String>("theme")?,
globals.get::<_, String>("colorscheme")?,
),
redis_url: globals.get::<_, String>("redis_url")?,
aggregator: AggregatorConfig {
random_delay: globals.get::<_, bool>("production_use")?,
},
logging,
debug,
upstream_search_engines: globals
.get::<_, HashMap<String, bool>>("upstream_search_engines")?
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.filter_map(|engine| crate::engines::engine_models::EngineHandler::new(&engine))
.collect(),
request_timeout: globals.get::<_, u8>("request_timeout")?,
threads,
})
}
}
@@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize};
/// * `theme` - It stores the parsed theme option used to set a theme for the website.
/// * `colorscheme` - It stores the parsed colorscheme option used to set a colorscheme for the
/// theme being used.
#[derive(Serialize, Deserialize, Clone)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Style {
pub theme: String,
pub colorscheme: String,
@@ -4,6 +4,7 @@
use std::collections::HashMap;
use std::io::Error;
use std::path::Path;
use std::sync::OnceLock;

// ------- Constants --------
static PUBLIC_DIRECTORY_NAME: &str = "public";
@@ -20,57 +21,7 @@ pub enum FileType {
Theme,
}

static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, Vec<String>>> =
once_cell::sync::Lazy::new(|| {
HashMap::from([
(
FileType::Config,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
CONFIG_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
],
),
(
FileType::Theme,
vec![
format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME),
format!("./{}/", PUBLIC_DIRECTORY_NAME),
],
),
(
FileType::AllowList,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
ALLOWLIST_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
],
),
(
FileType::BlockList,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
BLOCKLIST_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
],
),
])
});
static FILE_PATHS_FOR_DIFF_FILE_TYPES: OnceLock<HashMap<FileType, Vec<String>>> = OnceLock::new();

/// A helper function which returns an appropriate config file path checking if the config
/// file exists on that path.
@@ -95,11 +46,64 @@ static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, V
/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
/// 2. Under project folder ( or codebase in other words) if it is not present
/// here then it returns an error as mentioned above.
pub fn file_path(file_type: FileType) -> Result<String, Error> {
let file_path = FILE_PATHS_FOR_DIFF_FILE_TYPES.get(&file_type).unwrap();
pub fn file_path(file_type: FileType) -> Result<&'static str, Error> {
let file_path: &Vec<String> = FILE_PATHS_FOR_DIFF_FILE_TYPES
.get_or_init(|| {
HashMap::from([
(
FileType::Config,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
CONFIG_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME),
],
),
(
FileType::Theme,
vec![
format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME),
format!("./{}/", PUBLIC_DIRECTORY_NAME),
],
),
(
FileType::AllowList,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
ALLOWLIST_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME),
],
),
(
FileType::BlockList,
vec![
format!(
"{}/.config/{}/{}",
std::env::var("HOME").unwrap(),
COMMON_DIRECTORY_NAME,
BLOCKLIST_FILE_NAME
),
format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
format!("./{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME),
],
),
])
})
.get(&file_type)
.unwrap();

for (idx, _) in file_path.iter().enumerate() {
if Path::new(file_path[idx].as_str()).exists() {
return Ok(file_path[idx].clone());
return Ok(std::mem::take(&mut &*file_path[idx]));
}
}
@@ -42,7 +42,7 @@ use handler::paths::{file_path, FileType};
pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
let mut handlebars: Handlebars = Handlebars::new();

let public_folder_path: String = file_path(FileType::Theme)?;
let public_folder_path: &str = file_path(FileType::Theme)?;

handlebars
.register_templates_directory(".html", format!("{}/templates", public_folder_path))
@@ -71,7 +71,7 @@ pub async fn aggregate(
upstream_search_engines: Vec<EngineHandler>,
request_timeout: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let user_agent: String = random_user_agent();
let user_agent: &str = random_user_agent();

// Add a random delay before making the request.
if random_delay || !debug {
@@ -89,10 +89,9 @@ pub async fn aggregate(
let (name, search_engine) = engine_handler.into_name_engine();
names.push(name);
let query: String = query.clone();
let user_agent: String = user_agent.clone();
tasks.push(tokio::spawn(async move {
search_engine
.results(query, page, user_agent.clone(), request_timeout)
.results(query, page, user_agent.to_owned(), request_timeout)
.await
}));
}
@@ -155,13 +154,13 @@ pub async fn aggregate(
filter_with_lists(
&mut result_map,
&mut blacklist_map,
&file_path(FileType::BlockList)?,
file_path(FileType::BlockList)?,
)?;

filter_with_lists(
&mut blacklist_map,
&mut result_map,
&file_path(FileType::AllowList)?,
file_path(FileType::AllowList)?,
)?;

drop(blacklist_map);
@@ -1,28 +1,32 @@
//! This module provides the functionality to generate random user agent string.

use std::sync::OnceLock;

use fake_useragent::{Browsers, UserAgents, UserAgentsBuilder};

static USER_AGENTS: once_cell::sync::Lazy<UserAgents> = once_cell::sync::Lazy::new(|| {
UserAgentsBuilder::new()
.cache(false)
.dir("/tmp")
.thread(1)
.set_browsers(
Browsers::new()
.set_chrome()
.set_safari()
.set_edge()
.set_firefox()
.set_mozilla(),
)
.build()
});
static USER_AGENTS: OnceLock<UserAgents> = OnceLock::new();

/// A function to generate random user agent to improve privacy of the user.
///
/// # Returns
///
/// A randomly generated user agent string.
pub fn random_user_agent() -> String {
USER_AGENTS.random().to_string()
pub fn random_user_agent() -> &'static str {
USER_AGENTS
.get_or_init(|| {
UserAgentsBuilder::new()
.cache(false)
.dir("/tmp")
.thread(1)
.set_browsers(
Browsers::new()
.set_chrome()
.set_safari()
.set_edge()
.set_firefox()
.set_mozilla(),
)
.build()
})
.random()
}
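The same `OnceLock` pattern recurs in the handler paths change above: replace a `once_cell::sync::Lazy` static with a `OnceLock` that is filled via `get_or_init` on first use and then handed out as a `&'static` borrow. A stripped-down sketch of the idiom, with an illustrative `build_value` helper standing in for the expensive builder setup:

use std::sync::OnceLock;

static VALUE: OnceLock<String> = OnceLock::new();

fn build_value() -> String {
    // Stand-in for the expensive UserAgentsBuilder setup shown above.
    String::from("expensive-to-build data")
}

fn value() -> &'static str {
    // The closure runs exactly once; every later call returns the same
    // reference, which is why `random_user_agent` can now return &'static str.
    VALUE.get_or_init(build_value)
}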
@@ -159,9 +159,9 @@ async fn results(
req: HttpRequest,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
//Initialize redis cache connection struct
let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
let mut redis_cache = RedisCache::new(&config.redis_url, 5).await?;
// fetch the cached results json.
let cached_results_json = redis_cache.cached_json(&url);
let cached_results_json = redis_cache.cached_json(&url).await;
// check if fetched cache results was indeed fetched or it was an error and if so
// handle the data accordingly.
match cached_results_json {
@@ -206,7 +206,9 @@ async fn results(
}
};
results.add_style(config.style.clone());
redis_cache.cache_results(serde_json::to_string(&results)?, &url)?;
redis_cache
.cache_results(&serde_json::to_string(&results)?, &url)
.await?;
Ok(results)
}
}