Mirror of https://github.com/neon-mmd/websurfx.git

Merge branch 'rolling' into feat-rate-limiter-for-websurfx

alamin655 committed 2023-09-10 20:56:21 +05:30 (via GitHub)
commit 86991a2f9a
21 changed files with 571 additions and 329 deletions

.gitignore

@@ -4,3 +4,4 @@ package-lock.json
 dump.rdb
 .vscode
 megalinter-reports/
+dhat-heap.json

Cargo.lock (generated)

@@ -300,12 +300,24 @@ version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
 
+[[package]]
+name = "arc-swap"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
+
 [[package]]
 name = "askama_escape"
 version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
 
+[[package]]
+name = "async-once-cell"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9338790e78aa95a416786ec8389546c4b6a1dfc3dc36071ed9518a9413a542eb"
+
 [[package]]
 name = "async-trait"
 version = "0.1.73"
@@ -571,7 +583,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4"
 dependencies = [
  "bytes 1.4.0",
+ "futures-core",
  "memchr",
+ "pin-project-lite",
+ "tokio 1.32.0",
+ "tokio-util",
 ]
 
 [[package]]
@@ -845,6 +861,22 @@ dependencies = [
  "syn 1.0.109",
 ]
 
+[[package]]
+name = "dhat"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f2aaf837aaf456f6706cb46386ba8dffd4013a757e36f4ea05c20dd46b209a3"
+dependencies = [
+ "backtrace",
+ "lazy_static",
+ "mintex",
+ "parking_lot 0.12.1",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "thousands",
+]
+
 [[package]]
 name = "digest"
 version = "0.10.7"
@@ -1630,6 +1662,16 @@ version = "0.2.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
 
+[[package]]
+name = "libmimalloc-sys"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25d058a81af0d1c22d7a1c948576bee6d673f7af3c0f35564abd6c81122f513d"
+dependencies = [
+ "cc",
+ "libc",
+]
+
 [[package]]
 name = "linux-raw-sys"
 version = "0.4.5"
@@ -1767,6 +1809,15 @@ dependencies = [
  "autocfg 1.1.0",
 ]
 
+[[package]]
+name = "mimalloc"
+version = "0.1.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "972e5f23f6716f62665760b0f4cbf592576a80c7b879ba9beaafc0e558894127"
+dependencies = [
+ "libmimalloc-sys",
+]
+
 [[package]]
 name = "mime"
 version = "0.3.17"
@@ -1792,6 +1843,16 @@ dependencies = [
  "adler",
 ]
 
+[[package]]
+name = "mintex"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd7c5ba1c3b5a23418d7bbf98c71c3d4946a0125002129231da8d6b723d559cb"
+dependencies = [
+ "once_cell",
+ "sys-info",
+]
+
 [[package]]
 name = "mio"
 version = "0.6.23"
@@ -1835,6 +1896,20 @@ dependencies = [
  "ws2_32-sys",
 ]
 
+[[package]]
+name = "mlua"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bb37b0ba91f017aa7ca2b98ef99496827770cd635b4a932a6047c5b4bbe678e"
+dependencies = [
+ "bstr",
+ "cc",
+ "num-traits",
+ "once_cell",
+ "pkg-config",
+ "rustc-hash",
+]
+
 [[package]]
 name = "native-tls"
 version = "0.2.11"
@@ -2202,6 +2277,26 @@ dependencies = [
  "siphasher 0.3.11",
 ]
 
+[[package]]
+name = "pin-project"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
+dependencies = [
+ "proc-macro2 1.0.66",
+ "quote 1.0.33",
+ "syn 2.0.29",
+]
+
 [[package]]
 name = "pin-project-lite"
 version = "0.2.13"
@@ -2504,12 +2599,21 @@ version = "0.23.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba"
 dependencies = [
+ "arc-swap",
+ "async-trait",
+ "bytes 1.4.0",
  "combine",
+ "futures 0.3.28",
+ "futures-util",
  "itoa 1.0.9",
  "percent-encoding 2.3.0",
+ "pin-project-lite",
  "ryu",
  "sha1_smol",
  "socket2 0.4.9",
+ "tokio 1.32.0",
+ "tokio-retry",
+ "tokio-util",
  "url 2.4.1",
 ]
@@ -2628,36 +2732,18 @@ dependencies = [
  "winreg 0.50.0",
 ]
 
-[[package]]
-name = "rlua"
-version = "0.19.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d33e5ba15c3d43178f283ed5863d4531e292fc0e56fb773f3bea45f18e3a42a"
-dependencies = [
- "bitflags 1.3.2",
- "bstr",
- "libc",
- "num-traits",
- "rlua-lua54-sys",
-]
-
-[[package]]
-name = "rlua-lua54-sys"
-version = "0.1.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7aafabafe1895cb4a2be81a56d7ff3d46bf4b5d2f9cfdbea2ed404cdabe96474"
-dependencies = [
- "cc",
- "libc",
- "pkg-config",
-]
-
 [[package]]
 name = "rustc-demangle"
 version = "0.1.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
 
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
 [[package]]
 name = "rustc_version"
 version = "0.2.3"
@@ -2957,6 +3043,9 @@ name = "smallvec"
 version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
+dependencies = [
+ "serde",
+]
 
 [[package]]
 name = "socket2"
@@ -3098,6 +3187,16 @@ dependencies = [
  "unicode-xid 0.2.4",
 ]
 
+[[package]]
+name = "sys-info"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b3a0d0aba8bf96a0e1ddfdc352fc53b3df7f39318c71854910c3c4b024ae52c"
+dependencies = [
+ "cc",
+ "libc",
+]
+
 [[package]]
 name = "tempfile"
 version = "3.8.0"
@@ -3151,6 +3250,12 @@ dependencies = [
  "syn 2.0.29",
 ]
 
+[[package]]
+name = "thousands"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820"
+
 [[package]]
 name = "time"
 version = "0.1.45"
@@ -3335,6 +3440,17 @@ dependencies = [
  "tokio-sync",
 ]
 
+[[package]]
+name = "tokio-retry"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f"
+dependencies = [
+ "pin-project",
+ "rand 0.8.5",
+ "tokio 1.32.0",
+]
+
 [[package]]
 name = "tokio-sync"
 version = "0.1.8"
@@ -3688,24 +3804,29 @@ dependencies = [
  "actix-files",
  "actix-governor",
  "actix-web",
+ "async-once-cell",
  "async-trait",
  "criterion",
+ "dhat",
  "env_logger",
  "error-stack",
  "fake-useragent",
+ "futures 0.3.28",
  "handlebars",
  "log",
  "md5",
+ "mimalloc",
+ "mlua",
  "once_cell",
  "rand 0.8.5",
  "redis",
  "regex",
  "reqwest 0.11.20",
- "rlua",
  "rusty-hook",
  "scraper",
  "serde",
  "serde_json",
+ "smallvec 1.11.0",
  "tempfile",
  "tokio 1.32.0",
 ]

Cargo.toml

@@ -8,7 +8,7 @@ license = "AGPL-3.0"
 [dependencies]
 reqwest = {version="0.11.20",features=["json"]}
-tokio = {version="1.32.0",features=["full"]}
+tokio = {version="1.32.0",features=["rt-multi-thread","macros"]}
 serde = {version="1.0.188",features=["derive"]}
 handlebars = { version = "4.4.0", features = ["dir_source"] }
 scraper = {version="0.17.1"}
@@ -48,13 +48,17 @@ rpath = false
 [profile.release]
 opt-level = 3
-debug = false
+debug = false # This should only be commented when testing with dhat profiler
+# debug = 1 # This should only be uncommented when testing with dhat profiler
 split-debuginfo = '...'
 debug-assertions = false
 overflow-checks = false
-lto = 'thin'
+lto = true
 panic = 'abort'
 incremental = false
-codegen-units = 16
+codegen-units = 1
 rpath = false
 strip = "debuginfo"
+
+[features]
+dhat-heap = ["dep:dhat"]

Dockerfile

@@ -19,7 +19,7 @@ COPY . .
 RUN cargo install --path .
 
 # We do not need the Rust toolchain to run the binary!
-FROM gcr.io/distroless/cc-debian11
+FROM gcr.io/distroless/cc-debian12
 COPY --from=builder /app/public/ /opt/websurfx/public/
 COPY --from=builder /app/websurfx/config.lua /etc/xdg/websurfx/config.lua
 COPY --from=builder /usr/local/cargo/bin/* /usr/local/bin/

README.md

@@ -5,7 +5,7 @@
   <b align="center"><a href="README.md">Readme</a></b> |
   <b><a href="https://discord.gg/SWnda7Mw5u">Discord</a></b> |
   <b><a href="https://github.com/neon-mmd/websurfx">GitHub</a></b> |
-  <b><a href="./docs/README.md">Documentation</a></b>
+  <b><a href="../../tree/HEAD/docs/">Documentation</a></b>
   <br /><br />
   <a href="#">
     <img

websurfx/config.lua

@@ -109,7 +109,7 @@ colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used fo
 theme = "simple" -- the theme name which should be used for the website
 
 -- ### Caching ###
-redis_connection_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
+redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
 
 -- ### Search Engines ###
 upstream_search_engines = { DuckDuckGo = true, Searx = false } -- select the upstream search engines from which the results should be fetched.

src/bin/websurfx.rs

@@ -3,9 +3,19 @@
 //! This module contains the main function which handles the logging of the application to the
 //! stdout and handles the command line arguments provided and launches the `websurfx` server.
 
+use mimalloc::MiMalloc;
 use std::net::TcpListener;
 
 use websurfx::{config::parser::Config, run};
 
+/// A dhat heap memory profiler
+#[cfg(feature = "dhat-heap")]
+#[global_allocator]
+static ALLOC: dhat::Alloc = dhat::Alloc;
+
+#[cfg(not(feature = "dhat-heap"))]
+#[global_allocator]
+static GLOBAL: MiMalloc = MiMalloc;
+
 /// The function that launches the main server and registers all the routes of the website.
 ///
 /// # Error
@@ -14,6 +24,10 @@ use websurfx::{config::parser::Config, run};
 /// available for being used for other applications.
 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
+    // A dhat heap profiler initialization.
+    #[cfg(feature = "dhat-heap")]
+    let _profiler = dhat::Profiler::new_heap();
+
     // Initialize the parsed config file.
     let config = Config::parse(false).unwrap();
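For reference, a minimal standalone sketch of the feature-gated allocator swap used above, assuming a crate that declares the `mimalloc` and optional `dhat` dependencies plus a `dhat-heap` feature exactly as Cargo.toml now does:

```rust
use mimalloc::MiMalloc;

// With `--features dhat-heap`, dhat's wrapping allocator records every heap
// allocation; otherwise mimalloc serves as the global allocator.
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

#[cfg(not(feature = "dhat-heap"))]
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;

fn main() {
    // The profiler must be created first and kept alive for the measured
    // region; dropping it writes dhat-heap.json (hence the .gitignore entry).
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    let buf = vec![0u8; 1024]; // stand-in for real work
    println!("allocated {} bytes", buf.len());
}
```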

src/cache/cacher.rs

@@ -1,17 +1,27 @@
 //! This module provides the functionality to cache the aggregated results fetched and aggregated
 //! from the upstream search engines in a json format.
 
+use error_stack::Report;
+use futures::future::try_join_all;
 use md5::compute;
-use redis::{Client, Commands, Connection};
+use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};
+
+use super::error::PoolError;
 
 /// A named struct which stores the redis Connection url address to which the client will
 /// connect to.
 ///
 /// # Fields
 ///
-/// * `redis_connection_url` - It stores the redis Connection url address.
+/// * `connection_pool` - It stores a pool of connections ready to be used.
+/// * `pool_size` - It stores the size of the connection pool (in other words the number of
+/// connections that should be stored in the pool).
+/// * `current_connection` - It stores the index of which connection is being used at the moment.
+#[derive(Clone)]
 pub struct RedisCache {
-    connection: Connection,
+    connection_pool: Vec<ConnectionManager>,
+    pool_size: u8,
+    current_connection: u8,
 }
 
 impl RedisCache {
@@ -19,11 +29,25 @@ impl RedisCache {
     ///
     /// # Arguments
     ///
-    /// * `redis_connection_url` - It stores the redis Connection url address.
-    pub fn new(redis_connection_url: String) -> Result<Self, Box<dyn std::error::Error>> {
+    /// * `redis_connection_url` - It takes the redis Connection url address.
+    /// * `pool_size` - It takes the size of the connection pool (in other words the number of
+    /// connections that should be stored in the pool).
+    pub async fn new(
+        redis_connection_url: &str,
+        pool_size: u8,
+    ) -> Result<Self, Box<dyn std::error::Error>> {
         let client = Client::open(redis_connection_url)?;
-        let connection = client.get_connection()?;
-        let redis_cache = RedisCache { connection };
+        let mut tasks: Vec<_> = Vec::new();
+
+        for _ in 0..pool_size {
+            tasks.push(client.get_tokio_connection_manager());
+        }
+
+        let redis_cache = RedisCache {
+            connection_pool: try_join_all(tasks).await?,
+            pool_size,
+            current_connection: Default::default(),
+        };
+
         Ok(redis_cache)
     }
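In isolation, the pool construction amounts to opening `pool_size` connection managers concurrently. A hedged sketch of the same idea (the function name is invented; it assumes the redis crate's tokio support, as enabled in this repo):

```rust
use futures::future::try_join_all;
use redis::{aio::ConnectionManager, Client, RedisError};

// Open `pool_size` multiplexed connections at once; try_join_all polls all
// pending futures together and fails fast if any handshake errors out.
async fn build_pool(url: &str, pool_size: u8) -> Result<Vec<ConnectionManager>, RedisError> {
    let client = Client::open(url)?;
    let tasks = (0..pool_size).map(|_| client.get_tokio_connection_manager());
    try_join_all(tasks).await
}
```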
@@ -32,7 +56,7 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as string.
-    fn hash_url(url: &str) -> String {
+    fn hash_url(&self, url: &str) -> String {
         format!("{:?}", compute(url))
     }
@@ -41,9 +65,42 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
-        let hashed_url_string = Self::hash_url(url);
-        Ok(self.connection.get(hashed_url_string)?)
+    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<PoolError>> {
+        self.current_connection = Default::default();
+        let hashed_url_string: &str = &self.hash_url(url);
+
+        let mut result: Result<String, RedisError> = self.connection_pool
+            [self.current_connection as usize]
+            .get(hashed_url_string)
+            .await;
+
+        // Code to check whether the current connection being used is dropped with connection error
+        // or not. if it drops with the connection error then the current connection is replaced
+        // with a new connection from the pool which is then used to run the redis command then
+        // that connection is also checked whether it is dropped or not if it is not then the
+        // result is passed as a `Result` or else the same process repeats again and if all of the
+        // connections in the pool result in connection drop error then a custom pool error is
+        // returned.
+        loop {
+            match result {
+                Err(error) => match error.is_connection_dropped() {
+                    true => {
+                        self.current_connection += 1;
+                        if self.current_connection == self.pool_size {
+                            return Err(Report::new(
+                                PoolError::PoolExhaustionWithConnectionDropError,
+                            ));
+                        }
+                        result = self.connection_pool[self.current_connection as usize]
+                            .get(hashed_url_string)
+                            .await;
+                        continue;
+                    }
+                    false => return Err(Report::new(PoolError::RedisError(error))),
+                },
+                Ok(res) => return Ok(res),
+            }
+        }
     }
 
     /// A function which caches the results by using the hashed `url` as the key and
@@ -54,21 +111,45 @@ impl RedisCache {
     ///
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
-    pub fn cache_results(
+    pub async fn cache_results(
         &mut self,
-        json_results: String,
+        json_results: &str,
         url: &str,
-    ) -> Result<(), Box<dyn std::error::Error>> {
-        let hashed_url_string = Self::hash_url(url);
+    ) -> Result<(), Report<PoolError>> {
+        self.current_connection = Default::default();
+        let hashed_url_string: &str = &self.hash_url(url);
 
-        // put results_json into cache
-        self.connection.set(&hashed_url_string, json_results)?;
+        let mut result: Result<(), RedisError> = self.connection_pool
+            [self.current_connection as usize]
+            .set_ex(hashed_url_string, json_results, 60)
+            .await;
 
-        // Set the TTL for the key to 60 seconds
-        self.connection
-            .expire::<String, u32>(hashed_url_string, 60)
-            .unwrap();
-
-        Ok(())
+        // Code to check whether the current connection being used is dropped with connection error
+        // or not. if it drops with the connection error then the current connection is replaced
+        // with a new connection from the pool which is then used to run the redis command then
+        // that connection is also checked whether it is dropped or not if it is not then the
+        // result is passed as a `Result` or else the same process repeats again and if all of the
+        // connections in the pool result in connection drop error then a custom pool error is
+        // returned.
+        loop {
+            match result {
+                Err(error) => match error.is_connection_dropped() {
+                    true => {
+                        self.current_connection += 1;
+                        if self.current_connection == self.pool_size {
+                            return Err(Report::new(
+                                PoolError::PoolExhaustionWithConnectionDropError,
+                            ));
+                        }
+                        result = self.connection_pool[self.current_connection as usize]
+                            .set_ex(hashed_url_string, json_results, 60)
+                            .await;
+                        continue;
+                    }
+                    false => return Err(Report::new(PoolError::RedisError(error))),
+                },
+                Ok(_) => return Ok(()),
+            }
+        }
     }
 }
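The retry loop is duplicated verbatim between `cached_json` and `cache_results`. A sketch of how it could be factored out — this helper is not in the commit, `with_failover` is a hypothetical name, and since `ConnectionManager` is cheaply cloneable the closure can clone the slot it targets for each attempt:

```rust
use error_stack::Report;
use redis::RedisError;

use crate::cache::error::PoolError; // the enum introduced below

// Hypothetical helper: run `op` against pool slot `i`, advancing to the next
// slot whenever the previous connection was dropped, until the pool runs out.
async fn with_failover<T, F, Fut>(pool_size: u8, mut op: F) -> Result<T, Report<PoolError>>
where
    F: FnMut(u8) -> Fut,
    Fut: std::future::Future<Output = Result<T, RedisError>>,
{
    let mut current = 0u8;
    loop {
        match op(current).await {
            Ok(value) => return Ok(value),
            Err(e) if e.is_connection_dropped() => {
                current += 1;
                if current == pool_size {
                    return Err(Report::new(PoolError::PoolExhaustionWithConnectionDropError));
                }
            }
            Err(e) => return Err(Report::new(PoolError::RedisError(e))),
        }
    }
}
```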

src/cache/error.rs (new file)

@ -0,0 +1,40 @@
//! This module provides the error enum to handle different errors associated while requesting data from
//! the redis server using an async connection pool.
use std::fmt;
use redis::RedisError;
/// A custom error type used for handling redis async pool associated errors.
///
/// This enum provides variants three different categories of errors:
/// * `RedisError` - This variant handles all errors related to `RedisError`,
/// * `PoolExhaustionWithConnectionDropError` - This variant handles the error
/// which occurs when all the connections in the connection pool return a connection
/// dropped redis error.
#[derive(Debug)]
pub enum PoolError {
RedisError(RedisError),
PoolExhaustionWithConnectionDropError,
}
impl fmt::Display for PoolError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
PoolError::RedisError(redis_error) => {
if let Some(detail) = redis_error.detail() {
write!(f, "{}", detail)
} else {
write!(f, "")
}
}
PoolError::PoolExhaustionWithConnectionDropError => {
write!(
f,
"Error all connections from the pool dropped with connection error"
)
}
}
}
}
impl error_stack::Context for PoolError {}
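For orientation, a small sketch of how a `Context` like `PoolError` surfaces through error_stack at a call site (the caller function here is invented):

```rust
use error_stack::Report;

use crate::cache::error::PoolError;

// Invented caller: turn pool exhaustion into a Report carrying extra context.
fn fetch_cached() -> Result<String, Report<PoolError>> {
    Err(Report::new(PoolError::PoolExhaustionWithConnectionDropError)
        .attach_printable("while fetching cached json for a search url"))
}

fn main() {
    if let Err(report) = fetch_cached() {
        // Debug-printing a Report walks the whole context chain, which is why
        // PoolError needs Debug + Display + error_stack::Context.
        eprintln!("{report:?}");
    }
}
```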

src/cache/mod.rs

@@ -1 +1,2 @@
 pub mod cacher;
+pub mod error;

src/config/parser.rs

@@ -5,7 +5,7 @@ use crate::handler::paths::{file_path, FileType};
 use super::parser_models::{AggregatorConfig, RateLimiter, Style};
 use log::LevelFilter;
-use rlua::Lua;
+use mlua::Lua;
 use std::{collections::HashMap, fs, thread::available_parallelism};
 
 /// A named struct which stores the parsed config file options.
@@ -53,11 +53,10 @@ impl Config {
     /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
     /// Config struct with all the parsed config options from the parsed config file.
     pub fn parse(logging_initialized: bool) -> Result<Self, Box<dyn std::error::Error>> {
-        Lua::new().context(|context| -> Result<Self, Box<dyn std::error::Error>> {
-            let globals = context.globals();
+        let lua = Lua::new();
+        let globals = lua.globals();
 
-            context
-                .load(&fs::read_to_string(file_path(FileType::Config)?)?)
-                .exec()?;
+        lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
+            .exec()?;
 
         let parsed_threads: u8 = globals.get::<_, u8>("threads")?;
@@ -71,7 +70,9 @@ impl Config {
         let threads: u8 = if parsed_threads == 0 {
             let total_num_of_threads: usize = available_parallelism()?.get() / 2;
-            log::error!("Config Error: The value of `threads` option should be a non zero positive integer");
+            log::error!(
+                "Config Error: The value of `threads` option should be a non zero positive integer"
+            );
             log::error!("Falling back to using {} threads", total_num_of_threads);
             total_num_of_threads as u8
         } else {
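mlua drops rlua's scoped `context` closure, which is what lets `parse` read globals directly. A minimal standalone sketch (the inline Lua snippet stands in for the real config.lua contents):

```rust
use mlua::Lua;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let lua = Lua::new();

    // In websurfx this string comes from fs::read_to_string on config.lua.
    lua.load("threads = 8").exec()?;

    // Typed reads from the Lua global scope, as Config::parse does above.
    let threads: u8 = lua.globals().get::<_, u8>("threads")?;
    println!("threads = {threads}");
    Ok(())
}
```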

src/config/parser_models.rs

@@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize};
 /// * `theme` - It stores the parsed theme option used to set a theme for the website.
 /// * `colorscheme` - It stores the parsed colorscheme option used to set a colorscheme for the
 /// theme being used.
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Default)]
 pub struct Style {
     pub theme: String,
     pub colorscheme: String,

src/engines/duckduckgo.rs

@@ -4,14 +4,14 @@
 
 use std::collections::HashMap;
 
-use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
+use reqwest::header::HeaderMap;
 use scraper::{Html, Selector};
 
 use crate::results::aggregation_models::SearchResult;
 
 use super::engine_models::{EngineError, SearchEngine};
 
-use error_stack::{IntoReport, Report, Result, ResultExt};
+use error_stack::{Report, Result, ResultExt};
 
 /// A new DuckDuckGo engine type defined in-order to implement the `SearchEngine` trait which allows to
 /// reduce code duplication as well as allows to create vector of different search engines easily.
@@ -39,9 +39,9 @@ impl SearchEngine for DuckDuckGo {
     /// or HeaderMap fails to initialize.
     async fn results(
         &self,
-        query: String,
+        query: &str,
         page: u32,
-        user_agent: String,
+        user_agent: &str,
         request_timeout: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
@@ -61,38 +61,19 @@ impl SearchEngine for DuckDuckGo {
         };
 
         // initializing HeaderMap and adding appropriate headers.
-        let mut header_map = HeaderMap::new();
-        header_map.insert(
-            USER_AGENT,
-            user_agent
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            REFERER,
-            "https://google.com/"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            CONTENT_TYPE,
-            "application/x-www-form-urlencoded"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            COOKIE,
-            "kl=wt-wt"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            (
+                "CONTENT_TYPE".to_string(),
+                "application/x-www-form-urlencoded".to_string(),
+            ),
+            ("COOKIE".to_string(), "kl=wt-wt".to_string()),
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
 
         let document: Html = Html::parse_document(
-            &DuckDuckGo::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
+            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
         );
 
         let no_result: Selector = Selector::parse(".no-results")
@@ -126,8 +107,7 @@ impl SearchEngine for DuckDuckGo {
                     .next()
                     .unwrap()
                     .inner_html()
-                    .trim()
-                    .to_string(),
+                    .trim(),
                 format!(
                     "https://{}",
                     result
@@ -136,15 +116,15 @@ impl SearchEngine for DuckDuckGo {
                         .unwrap()
                         .inner_html()
                         .trim()
-                ),
+                )
+                .as_str(),
                 result
                     .select(&result_desc)
                     .next()
                     .unwrap()
                     .inner_html()
-                    .trim()
-                    .to_string(),
-                vec!["duckduckgo".to_string()],
+                    .trim(),
+                &["duckduckgo"],
             )
         })
         .map(|search_result| (search_result.url.clone(), search_result))
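The rewritten header construction relies on the `TryFrom<&HashMap>` impl that reqwest re-exports from the http crate. A standalone sketch (header values here are illustrative):

```rust
use std::collections::HashMap;

use reqwest::header::HeaderMap;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // One fallible conversion replaces four insert()/parse()/into_report()
    // chains; any invalid header name or value surfaces as a single error.
    let header_map = HeaderMap::try_from(&HashMap::from([
        ("user-agent".to_string(), "example-agent/0.1".to_string()),
        ("referer".to_string(), "https://google.com/".to_string()),
        ("cookie".to_string(), "kl=wt-wt".to_string()),
    ]))?;

    assert_eq!(header_map.len(), 3);
    Ok(())
}
```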

src/engines/engine_models.rs

@@ -2,7 +2,7 @@
 //! the upstream search engines with the search query provided by the user.
 
 use crate::results::aggregation_models::SearchResult;
-use error_stack::{IntoReport, Result, ResultExt};
+use error_stack::{Result, ResultExt};
 use std::{collections::HashMap, fmt, time::Duration};
 
 /// A custom error type used for handle engine associated errors.
@@ -48,7 +48,7 @@ impl error_stack::Context for EngineError {}
 pub trait SearchEngine: Sync + Send {
     async fn fetch_html_from_upstream(
         &self,
-        url: String,
+        url: &str,
         header_map: reqwest::header::HeaderMap,
         request_timeout: u8,
     ) -> Result<String, EngineError> {
@@ -59,19 +59,17 @@ pub trait SearchEngine: Sync + Send {
             .headers(header_map) // add spoofed headers to emulate human behavior
             .send()
             .await
-            .into_report()
             .change_context(EngineError::RequestError)?
             .text()
             .await
-            .into_report()
             .change_context(EngineError::RequestError)?)
     }
 
     async fn results(
         &self,
-        query: String,
+        query: &str,
         page: u32,
-        user_agent: String,
+        user_agent: &str,
         request_timeout: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError>;
 }
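A hedged sketch of what an implementor sees after this signature change — the `Dummy` engine is invented, and the real trait is driven through the async-trait crate listed in Cargo.toml:

```rust
use std::collections::HashMap;

use crate::engines::engine_models::{EngineError, SearchEngine};
use crate::results::aggregation_models::SearchResult;
use error_stack::Result;

/// Invented engine returning no results; it only exercises the borrowed
/// `&str` parameters introduced by this commit.
struct Dummy;

#[async_trait::async_trait]
impl SearchEngine for Dummy {
    async fn results(
        &self,
        _query: &str,
        _page: u32,
        _user_agent: &str,
        _request_timeout: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
        // Callers no longer clone a String per engine they fan out to.
        Ok(HashMap::new())
    }
}
```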

src/engines/searx.rs

@@ -2,14 +2,14 @@
 //! by querying the upstream searx search engine instance with user provided query and with a page
 //! number if provided.
 
-use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
+use reqwest::header::HeaderMap;
 use scraper::{Html, Selector};
 use std::collections::HashMap;
 
 use crate::results::aggregation_models::SearchResult;
 
 use super::engine_models::{EngineError, SearchEngine};
-use error_stack::{IntoReport, Report, Result, ResultExt};
+use error_stack::{Report, Result, ResultExt};
 
 /// A new Searx engine type defined in-order to implement the `SearchEngine` trait which allows to
 /// reduce code duplication as well as allows to create vector of different search engines easily.
@@ -38,9 +38,9 @@ impl SearchEngine for Searx {
     async fn results(
         &self,
-        query: String,
+        query: &str,
         page: u32,
-        user_agent: String,
+        user_agent: &str,
         request_timeout: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
@@ -51,32 +51,16 @@ impl SearchEngine for Searx {
         };
 
         // initializing headers and adding appropriate headers.
-        let mut header_map = HeaderMap::new();
-        header_map.insert(
-            USER_AGENT,
-            user_agent
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            REFERER,
-            "https://google.com/"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(
-            CONTENT_TYPE,
-            "application/x-www-form-urlencoded"
-                .parse()
-                .into_report()
-                .change_context(EngineError::UnexpectedError)?,
-        );
-        header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse().into_report().change_context(EngineError::UnexpectedError)?);
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            ("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()),
+            ("COOKIE".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string())
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
 
         let document: Html = Html::parse_document(
-            &Searx::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
+            &Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
         );
 
         let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
@@ -117,24 +101,21 @@ impl SearchEngine for Searx {
                     .next()
                     .unwrap()
                     .inner_html()
-                    .trim()
-                    .to_string(),
+                    .trim(),
                 result
                     .select(&result_url)
                     .next()
                     .unwrap()
                     .value()
                     .attr("href")
-                    .unwrap()
-                    .to_string(),
+                    .unwrap(),
                 result
                     .select(&result_desc)
                     .next()
                     .unwrap()
                     .inner_html()
-                    .trim()
-                    .to_string(),
-                vec!["searx".to_string()],
+                    .trim(),
+                &["searx"],
             )
         })
         .map(|search_result| (search_result.url.clone(), search_result))

src/handler/paths.rs

@@ -4,6 +4,7 @@
 use std::collections::HashMap;
 use std::io::Error;
 use std::path::Path;
+use std::sync::OnceLock;
 
 // ------- Constants --------
 static PUBLIC_DIRECTORY_NAME: &str = "public";
@@ -20,8 +21,34 @@ pub enum FileType {
     Theme,
 }
 
-static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, Vec<String>>> =
-    once_cell::sync::Lazy::new(|| {
+static FILE_PATHS_FOR_DIFF_FILE_TYPES: OnceLock<HashMap<FileType, Vec<String>>> = OnceLock::new();
+
+/// A helper function which returns an appropriate config file path checking if the config
+/// file exists on that path.
+///
+/// # Error
+///
+/// Returns a `config file not found!!` error if the config file is not present under following
+/// paths which are:
+/// 1. `~/.config/websurfx/` if it not present here then it fallbacks to the next one (2)
+/// 2. `/etc/xdg/websurfx/config.lua` if it is not present here then it fallbacks to the next
+/// one (3).
+/// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
+/// here then it returns an error as mentioned above.
+/// A function which returns an appropriate theme directory path checking if the theme
+/// directory exists on that path.
+///
+/// # Error
+///
+/// Returns a `Theme (public) folder not found!!` error if the theme folder is not present under following
+/// paths which are:
+/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
+/// 2. Under project folder ( or codebase in other words) if it is not present
+/// here then it returns an error as mentioned above.
+pub fn file_path(file_type: FileType) -> Result<&'static str, Error> {
+    let file_path: &Vec<String> = FILE_PATHS_FOR_DIFF_FILE_TYPES
+        .get_or_init(|| {
             HashMap::from([
                 (
                     FileType::Config,
@@ -70,36 +97,13 @@ static FILE_PATHS_FOR_DIFF_FILE_TYPES: once_cell::sync::Lazy<HashMap<FileType, V
                 ],
             ),
         ])
-    });
-
-/// A helper function which returns an appropriate config file path checking if the config
-/// file exists on that path.
-///
-/// # Error
-///
-/// Returns a `config file not found!!` error if the config file is not present under following
-/// paths which are:
-/// 1. `~/.config/websurfx/` if it not present here then it fallbacks to the next one (2)
-/// 2. `/etc/xdg/websurfx/config.lua` if it is not present here then it fallbacks to the next
-/// one (3).
-/// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
-/// here then it returns an error as mentioned above.
-/// A function which returns an appropriate theme directory path checking if the theme
-/// directory exists on that path.
-///
-/// # Error
-///
-/// Returns a `Theme (public) folder not found!!` error if the theme folder is not present under following
-/// paths which are:
-/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
-/// 2. Under project folder ( or codebase in other words) if it is not present
-/// here then it returns an error as mentioned above.
-pub fn file_path(file_type: FileType) -> Result<String, Error> {
-    let file_path = FILE_PATHS_FOR_DIFF_FILE_TYPES.get(&file_type).unwrap();
+        })
+        .get(&file_type)
+        .unwrap();
+
     for (idx, _) in file_path.iter().enumerate() {
         if Path::new(file_path[idx].as_str()).exists() {
-            return Ok(file_path[idx].clone());
+            return Ok(std::mem::take(&mut &*file_path[idx]));
        }
    }
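The Lazy → OnceLock rewrite is the standard-library form of the same lazy-initialization pattern. A minimal sketch with an invented lookup table:

```rust
use std::collections::HashMap;
use std::sync::OnceLock;

// OnceLock has no initializer at declaration time; the closure passed to
// get_or_init runs exactly once, on first access, like once_cell's Lazy.
static TABLE: OnceLock<HashMap<&'static str, u32>> = OnceLock::new();

fn lookup(key: &str) -> Option<u32> {
    TABLE
        .get_or_init(|| HashMap::from([("config", 1), ("theme", 2)]))
        .get(key)
        .copied()
}

fn main() {
    assert_eq!(lookup("theme"), Some(2));
}
```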

src/lib.rs

@@ -43,7 +43,7 @@ use handler::paths::{file_path, FileType};
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
     let mut handlebars: Handlebars = Handlebars::new();
 
-    let public_folder_path: String = file_path(FileType::Theme)?;
+    let public_folder_path: &str = file_path(FileType::Theme)?;
 
     handlebars
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))

src/results/aggregation_models.rs

@@ -2,6 +2,7 @@
 //! data scraped from the upstream search engines.
 
 use serde::{Deserialize, Serialize};
+use smallvec::SmallVec;
 
 use crate::{config::parser_models::Style, engines::engine_models::EngineError};
@@ -16,13 +17,13 @@ use crate::{config::parser_models::Style, engines::engine_models::EngineError};
 /// (href url in html in simple words).
 /// * `description` - The description of the search result.
 /// * `engine` - The names of the upstream engines from which this results were provided.
-#[derive(Clone, Serialize, Deserialize)]
+#[derive(Clone, Serialize, Deserialize, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
     pub title: String,
     pub url: String,
     pub description: String,
-    pub engine: Vec<String>,
+    pub engine: SmallVec<[String; 0]>,
 }
 
 impl SearchResult {
@@ -35,12 +36,12 @@ impl SearchResult {
     /// (href url in html in simple words).
     /// * `description` - The description of the search result.
     /// * `engine` - The names of the upstream engines from which this results were provided.
-    pub fn new(title: String, url: String, description: String, engine: Vec<String>) -> Self {
+    pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {
         SearchResult {
-            title,
-            url,
-            description,
-            engine,
+            title: title.to_owned(),
+            url: url.to_owned(),
+            description: description.to_owned(),
+            engine: engine.iter().map(|name| name.to_string()).collect(),
         }
     }
@@ -49,8 +50,8 @@ impl SearchResult {
     /// # Arguments
     ///
     /// * `engine` - Takes an engine name provided as a String.
-    pub fn add_engines(&mut self, engine: String) {
-        self.engine.push(engine)
+    pub fn add_engines(&mut self, engine: &str) {
+        self.engine.push(engine.to_owned())
     }
 
     /// A function which returns the engine name stored from the struct as a string.
@@ -58,13 +59,12 @@ impl SearchResult {
     /// # Returns
     ///
     /// An engine name stored as a string from the struct.
-    pub fn engine(self) -> String {
-        self.engine.get(0).unwrap().to_string()
+    pub fn engine(&mut self) -> String {
+        std::mem::take(&mut self.engine[0])
     }
 }
 
-///
-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Clone)]
 pub struct EngineErrorInfo {
     pub error: String,
     pub engine: String,
@@ -72,18 +72,18 @@ }
 }
 
 impl EngineErrorInfo {
-    pub fn new(error: &EngineError, engine: String) -> Self {
+    pub fn new(error: &EngineError, engine: &str) -> Self {
         Self {
             error: match error {
-                EngineError::RequestError => String::from("RequestError"),
-                EngineError::EmptyResultSet => String::from("EmptyResultSet"),
-                EngineError::UnexpectedError => String::from("UnexpectedError"),
+                EngineError::RequestError => "RequestError".to_owned(),
+                EngineError::EmptyResultSet => "EmptyResultSet".to_owned(),
+                EngineError::UnexpectedError => "UnexpectedError".to_owned(),
             },
-            engine,
+            engine: engine.to_owned(),
             severity_color: match error {
-                EngineError::RequestError => String::from("green"),
-                EngineError::EmptyResultSet => String::from("blue"),
-                EngineError::UnexpectedError => String::from("red"),
+                EngineError::RequestError => "green".to_owned(),
+                EngineError::EmptyResultSet => "blue".to_owned(),
+                EngineError::UnexpectedError => "red".to_owned(),
             },
         }
     }
@@ -108,7 +108,7 @@ pub struct SearchResults {
     pub results: Vec<SearchResult>,
     pub page_query: String,
     pub style: Style,
-    pub engine_errors_info: Vec<EngineErrorInfo>,
+    pub engine_errors_info: SmallVec<[EngineErrorInfo; 0]>,
 }
 
 impl SearchResults {
@@ -124,19 +124,19 @@ impl SearchResults {
     /// given search query.
     pub fn new(
         results: Vec<SearchResult>,
-        page_query: String,
-        engine_errors_info: Vec<EngineErrorInfo>,
+        page_query: &str,
+        engine_errors_info: &[EngineErrorInfo],
     ) -> Self {
-        SearchResults {
+        Self {
             results,
-            page_query,
-            style: Style::new("".to_string(), "".to_string()),
-            engine_errors_info,
+            page_query: page_query.to_owned(),
+            style: Style::default(),
+            engine_errors_info: SmallVec::from(engine_errors_info),
         }
     }
 
     /// A setter function to add website style to the return search results.
-    pub fn add_style(&mut self, style: Style) {
-        self.style = style;
+    pub fn add_style(&mut self, style: &Style) {
+        self.style = style.to_owned();
     }
 }
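Both `engine` and `engine_errors_info` switch from Vec to SmallVec. A short sketch of SmallVec's inline/heap split — inline capacity 2 here to make the spill visible, whereas the commit uses capacity 0 so storage always lives on the heap while the type line stays uniform (the "brave" name is illustrative):

```rust
use smallvec::{smallvec, SmallVec};

fn main() {
    // Up to two names live inline on the stack; a third push spills to heap.
    let mut engines: SmallVec<[String; 2]> = smallvec!["duckduckgo".to_owned()];
    assert!(!engines.spilled());

    engines.push("searx".to_owned());
    engines.push("brave".to_owned());
    assert!(engines.spilled()); // now heap-backed, just like a Vec
}
```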

src/results/aggregator.rs

@@ -64,14 +64,14 @@ type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<Eng
 /// function in either `searx` or `duckduckgo` or both otherwise returns a `SearchResults struct`
 /// containing appropriate values.
 pub async fn aggregate(
-    query: String,
+    query: &str,
     page: u32,
     random_delay: bool,
     debug: bool,
-    upstream_search_engines: Vec<EngineHandler>,
+    upstream_search_engines: &[EngineHandler],
     request_timeout: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
-    let user_agent: String = random_user_agent();
+    let user_agent: &str = random_user_agent();
 
     // Add a random delay before making the request.
     if random_delay || !debug {
@@ -80,19 +80,18 @@ pub async fn aggregate(
         tokio::time::sleep(Duration::from_secs(delay_secs)).await;
     }
 
-    let mut names: Vec<&str> = vec![];
+    let mut names: Vec<&str> = Vec::with_capacity(0);
 
     // create tasks for upstream result fetching
     let mut tasks: FutureVec = FutureVec::new();
 
     for engine_handler in upstream_search_engines {
-        let (name, search_engine) = engine_handler.into_name_engine();
+        let (name, search_engine) = engine_handler.to_owned().into_name_engine();
         names.push(name);
-        let query: String = query.clone();
-        let user_agent: String = user_agent.clone();
+        let query: String = query.to_owned();
         tasks.push(tokio::spawn(async move {
             search_engine
-                .results(query, page, user_agent.clone(), request_timeout)
+                .results(&query, page, user_agent, request_timeout)
                 .await
         }));
    }
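The spawn-then-join fan-out above, reduced to its skeleton (the engine names and the per-task closure body are stand-ins):

```rust
use tokio::task::JoinHandle;

#[tokio::main]
async fn main() {
    let engines = ["duckduckgo", "searx"];
    let mut names = Vec::new();
    let mut tasks: Vec<JoinHandle<usize>> = Vec::new();

    for name in engines {
        names.push(name);
        // Each engine runs on its own task so slow upstreams overlap
        // instead of serializing; a &'static str moves into the task freely.
        tasks.push(tokio::spawn(async move { name.len() }));
    }

    for (name, task) in names.iter().zip(tasks) {
        let result = task.await.expect("task panicked");
        println!("{name}: {result}");
    }
}
```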
@@ -110,7 +109,7 @@ pub async fn aggregate(
     let mut result_map: HashMap<String, SearchResult> = HashMap::new();
     let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new();
 
-    let mut handle_error = |error: Report<EngineError>, engine_name: String| {
+    let mut handle_error = |error: &Report<EngineError>, engine_name: &'static str| {
         log::error!("Engine Error: {:?}", error);
         engine_errors_info.push(EngineErrorInfo::new(
             error.downcast_ref::<EngineError>().unwrap(),
@@ -120,7 +119,7 @@ pub async fn aggregate(
     for _ in 0..responses.len() {
         let response = responses.pop().unwrap();
-        let engine = names.pop().unwrap().to_string();
+        let engine = names.pop().unwrap();
 
         if result_map.is_empty() {
             match response {
@@ -128,7 +127,7 @@ pub async fn aggregate(
                     result_map = results.clone();
                 }
                 Err(error) => {
-                    handle_error(error, engine);
+                    handle_error(&error, engine);
                }
            }
            continue;
@@ -140,13 +139,13 @@ pub async fn aggregate(
                 result_map
                     .entry(key)
                     .and_modify(|result| {
-                        result.add_engines(engine.clone());
+                        result.add_engines(engine);
                     })
                     .or_insert_with(|| -> SearchResult { value });
             });
         }
         Err(error) => {
-            handle_error(error, engine);
+            handle_error(&error, engine);
        }
    }
 }
@@ -155,24 +154,20 @@ pub async fn aggregate(
     filter_with_lists(
         &mut result_map,
         &mut blacklist_map,
-        &file_path(FileType::BlockList)?,
+        file_path(FileType::BlockList)?,
     )?;
 
     filter_with_lists(
         &mut blacklist_map,
         &mut result_map,
-        &file_path(FileType::AllowList)?,
+        file_path(FileType::AllowList)?,
     )?;
 
     drop(blacklist_map);
 
     let results: Vec<SearchResult> = result_map.into_values().collect();
 
-    Ok(SearchResults::new(
-        results,
-        query.to_string(),
-        engine_errors_info,
-    ))
+    Ok(SearchResults::new(results, query, &engine_errors_info))
 }
 
 /// Filters a map of search results using a list of regex patterns.
@@ -203,7 +198,10 @@ pub fn filter_with_lists(
             || re.is_match(&search_result.description.to_lowercase())
         {
             // If the search result matches the regex pattern, move it from the original map to the resultant map
-            resultant_map.insert(url.clone(), map_to_be_filtered.remove(&url).unwrap());
+            resultant_map.insert(
+                url.to_owned(),
+                map_to_be_filtered.remove(&url.to_owned()).unwrap(),
+            );
         }
     }
 }
@@ -214,6 +212,7 @@ pub fn filter_with_lists(
 #[cfg(test)]
 mod tests {
     use super::*;
+    use smallvec::smallvec;
     use std::collections::HashMap;
     use std::io::Write;
     use tempfile::NamedTempFile;
@@ -223,22 +222,22 @@ mod tests {
         // Create a map of search results to filter
         let mut map_to_be_filtered = HashMap::new();
         map_to_be_filtered.insert(
-            "https://www.example.com".to_string(),
+            "https://www.example.com".to_owned(),
             SearchResult {
-                title: "Example Domain".to_string(),
-                url: "https://www.example.com".to_string(),
+                title: "Example Domain".to_owned(),
+                url: "https://www.example.com".to_owned(),
                 description: "This domain is for use in illustrative examples in documents."
-                    .to_string(),
-                engine: vec!["Google".to_string(), "Bing".to_string()],
+                    .to_owned(),
+                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
             },
         );
         map_to_be_filtered.insert(
-            "https://www.rust-lang.org/".to_string(),
+            "https://www.rust-lang.org/".to_owned(),
             SearchResult {
-                title: "Rust Programming Language".to_string(),
-                url: "https://www.rust-lang.org/".to_string(),
-                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_string(),
-                engine: vec!["Google".to_string(), "DuckDuckGo".to_string()],
+                title: "Rust Programming Language".to_owned(),
+                url: "https://www.rust-lang.org/".to_owned(),
+                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
+                engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
             },
         );
@@ -267,22 +266,22 @@ mod tests {
     fn test_filter_with_lists_wildcard() -> Result<(), Box<dyn std::error::Error>> {
         let mut map_to_be_filtered = HashMap::new();
         map_to_be_filtered.insert(
-            "https://www.example.com".to_string(),
+            "https://www.example.com".to_owned(),
             SearchResult {
-                title: "Example Domain".to_string(),
-                url: "https://www.example.com".to_string(),
+                title: "Example Domain".to_owned(),
+                url: "https://www.example.com".to_owned(),
                 description: "This domain is for use in illustrative examples in documents."
-                    .to_string(),
-                engine: vec!["Google".to_string(), "Bing".to_string()],
+                    .to_owned(),
+                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
             },
         );
         map_to_be_filtered.insert(
-            "https://www.rust-lang.org/".to_string(),
+            "https://www.rust-lang.org/".to_owned(),
             SearchResult {
-                title: "Rust Programming Language".to_string(),
-                url: "https://www.rust-lang.org/".to_string(),
-                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_string(),
-                engine: vec!["Google".to_string(), "DuckDuckGo".to_string()],
+                title: "Rust Programming Language".to_owned(),
+                url: "https://www.rust-lang.org/".to_owned(),
+                description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
+                engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
             },
        );
@@ -327,13 +326,13 @@ mod tests {
     fn test_filter_with_lists_invalid_regex() {
         let mut map_to_be_filtered = HashMap::new();
         map_to_be_filtered.insert(
-            "https://www.example.com".to_string(),
+            "https://www.example.com".to_owned(),
             SearchResult {
-                title: "Example Domain".to_string(),
-                url: "https://www.example.com".to_string(),
+                title: "Example Domain".to_owned(),
+                url: "https://www.example.com".to_owned(),
                 description: "This domain is for use in illustrative examples in documents."
-                    .to_string(),
-                engine: vec!["Google".to_string(), "Bing".to_string()],
+                    .to_owned(),
+                engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
             },
         );

src/results/user_agent.rs

@@ -1,8 +1,19 @@
 //! This module provides the functionality to generate random user agent string.
 
+use std::sync::OnceLock;
+
 use fake_useragent::{Browsers, UserAgents, UserAgentsBuilder};
 
-static USER_AGENTS: once_cell::sync::Lazy<UserAgents> = once_cell::sync::Lazy::new(|| {
+static USER_AGENTS: OnceLock<UserAgents> = OnceLock::new();
+
+/// A function to generate random user agent to improve privacy of the user.
+///
+/// # Returns
+///
+/// A randomly generated user agent string.
+pub fn random_user_agent() -> &'static str {
+    USER_AGENTS
+        .get_or_init(|| {
             UserAgentsBuilder::new()
                 .cache(false)
                 .dir("/tmp")
@@ -16,13 +27,6 @@ static USER_AGENTS: once_cell::sync::Lazy<UserAgents> = once_cell::sync::Lazy::n
                         .set_mozilla(),
                 )
                 .build()
-});
-
-/// A function to generate random user agent to improve privacy of the user.
-///
-/// # Returns
-///
-/// A randomly generated user agent string.
-pub fn random_user_agent() -> String {
-    USER_AGENTS.random().to_string()
+        })
+        .random()
 }

src/server/routes.rs

@@ -16,6 +16,10 @@ use handlebars::Handlebars;
 use serde::Deserialize;
 use tokio::join;
 
+// ---- Constants ----
+/// Initialize redis cache connection once and store it on the heap.
+const REDIS_CACHE: async_once_cell::OnceCell<RedisCache> = async_once_cell::OnceCell::new();
+
 /// A named struct which deserializes all the user provided search parameters and stores them.
 ///
 /// # Fields
@@ -62,10 +66,10 @@ pub async fn not_found(
 /// * `engines` - It stores the user selected upstream search engines selected from the UI.
 #[allow(dead_code)]
 #[derive(Deserialize)]
-struct Cookie {
-    theme: String,
-    colorscheme: String,
-    engines: Vec<String>,
+struct Cookie<'a> {
+    theme: &'a str,
+    colorscheme: &'a str,
+    engines: Vec<&'a str>,
 }
 
 /// Handles the route of search page of the `websurfx` meta search engine website and it takes
@@ -111,9 +115,9 @@ pub async fn search(
                     page - 1
                 ),
                 &config,
-                query.to_string(),
+                query,
                 page - 1,
-                req.clone(),
+                &req,
             ),
             results(
                 format!(
@@ -121,9 +125,9 @@ pub async fn search(
                     config.binding_ip, config.port, query, page
                 ),
                 &config,
-                query.to_string(),
+                query,
                 page,
-                req.clone(),
+                &req,
             ),
             results(
                 format!(
@@ -134,9 +138,9 @@ pub async fn search(
                     page + 1
                 ),
                 &config,
-                query.to_string(),
+                query,
                 page + 1,
-                req.clone(),
+                &req,
             )
         );
@@ -154,30 +158,35 @@ pub async fn search(
 async fn results(
     url: String,
     config: &Config,
-    query: String,
+    query: &str,
     page: u32,
-    req: HttpRequest,
+    req: &HttpRequest,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
-    //Initialize redis cache connection struct
-    let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
+    let redis_cache: RedisCache = REDIS_CACHE
+        .get_or_init(async {
+            // Initialize redis cache connection pool only one and store it in the heap.
+            RedisCache::new(&config.redis_url, 5).await.unwrap()
+        })
+        .await
+        .clone();
+
     // fetch the cached results json.
-    let cached_results_json = redis_cache.cached_json(&url);
+    let cached_results_json: Result<String, error_stack::Report<crate::cache::error::PoolError>> =
+        redis_cache.clone().cached_json(&url).await;
     // check if fetched cache results was indeed fetched or it was an error and if so
     // handle the data accordingly.
     match cached_results_json {
-        Ok(results) => Ok(serde_json::from_str::<SearchResults>(&results).unwrap()),
+        Ok(results) => Ok(serde_json::from_str::<SearchResults>(&results)?),
         Err(_) => {
             // check if the cookie value is empty or not if it is empty then use the
             // default selected upstream search engines from the config file otherwise
             // parse the non-empty cookie and grab the user selected engines from the
             // UI and use that.
-            let mut results: crate::results::aggregation_models::SearchResults = match req
-                .cookie("appCookie")
-            {
+            let mut results: SearchResults = match req.cookie("appCookie") {
                 Some(cookie_value) => {
                     let cookie_value: Cookie = serde_json::from_str(cookie_value.name_value().1)?;
 
-                    let engines = cookie_value
+                    let engines: Vec<EngineHandler> = cookie_value
                         .engines
                         .iter()
                         .filter_map(|name| EngineHandler::new(name))
@@ -188,7 +197,7 @@ async fn results(
                         page,
                         config.aggregator.random_delay,
                         config.debug,
-                        engines,
+                        &engines,
                         config.request_timeout,
                     )
                     .await?
@@ -199,14 +208,18 @@ async fn results(
                         page,
                         config.aggregator.random_delay,
                         config.debug,
-                        config.upstream_search_engines.clone(),
+                        &config.upstream_search_engines,
                         config.request_timeout,
                     )
                     .await?
                 }
             };
 
-            results.add_style(config.style.clone());
-            redis_cache.cache_results(serde_json::to_string(&results)?, &url)?;
+            results.add_style(&config.style);
+            redis_cache
+                .clone()
+                .cache_results(&serde_json::to_string(&results)?, &url)
+                .await?;
 
             Ok(results)
         }
     }
 }
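For completeness, the async_once_cell pattern behind `REDIS_CACHE` in isolation (the URL literal is illustrative). One subtlety worth noting: only a `static` cell memoizes across call sites, since a `const` item is materialized anew wherever it is used.

```rust
use async_once_cell::OnceCell;

// A static cell shares one lazily-initialized value across all callers.
static CACHE: OnceCell<String> = OnceCell::new();

async fn connection_string() -> &'static str {
    CACHE
        .get_or_init(async {
            // Imagine an expensive async setup step here, e.g. opening a pool.
            "redis://127.0.0.1:6379".to_owned()
        })
        .await
}

#[tokio::main]
async fn main() {
    // Both calls observe the same single initialization.
    assert_eq!(connection_string().await, connection_string().await);
}
```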