0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-22 05:58:21 -05:00
This commit is contained in:
Ilan Joselevich 2024-02-07 00:26:35 +00:00 committed by GitHub
commit 15aaa1b63f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
21 changed files with 430 additions and 331 deletions

43
Cargo.lock generated
View File

@ -1129,6 +1129,15 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "erased-serde"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55d05712b2d8d88102bc9868020c9e5c7a1f5527c452b9b97450a1d006140ba7"
dependencies = [
"serde",
]
[[package]]
name = "errno"
version = "0.3.8"
@ -1941,9 +1950,9 @@ dependencies = [
[[package]]
name = "luajit-src"
version = "210.5.3+29b0b28"
version = "210.5.5+f2336c4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c2bb89013916ce5c949f01a1fbd6d435a58e1d980767a791d755911211d792d"
checksum = "d8bcba9790f4e3b1c1467d75cdd011a63bbe6bc75da95af5d2cb4e3631f939c4"
dependencies = [
"cc",
"which",
@ -2161,22 +2170,25 @@ dependencies = [
[[package]]
name = "mlua"
version = "0.9.2"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c81f8ac20188feb5461a73eabb22a34dd09d6d58513535eb587e46bff6ba250"
checksum = "1d3561f79659ff3afad7b25e2bf2ec21507fe601ebecb7f81088669ec4bfd51e"
dependencies = [
"bstr",
"erased-serde",
"mlua-sys",
"num-traits",
"once_cell",
"rustc-hash",
"serde",
"serde-value",
]
[[package]]
name = "mlua-sys"
version = "0.4.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc29228347d6bdc9e613dc95c69df2817f755434ee0f7f3b27b57755fe238b7f"
checksum = "2847b42764435201d8cbee1f517edb79c4cca4181877b90047587c89e1b7bce4"
dependencies = [
"cc",
"cfg-if 1.0.0",
@ -2328,6 +2340,15 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "ordered-float"
version = "2.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
dependencies = [
"num-traits",
]
[[package]]
name = "parcel_selectors"
version = "0.26.4"
@ -3248,6 +3269,16 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde-value"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
dependencies = [
"ordered-float",
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.196"

View File

@ -15,7 +15,7 @@ path = "src/bin/websurfx.rs"
[dependencies]
reqwest = {version="0.11.22", default-features=false, features=["rustls-tls","brotli", "gzip"]}
tokio = {version="1.32.0",features=["rt-multi-thread","macros"], default-features = false}
serde = {version="1.0.196", default-features=false, features=["derive"]}
serde = { version = "1.0.196", default-features = false, features = ["derive"] }
serde_json = {version="1.0.109", default-features=false}
maud = {version="0.25.0", default-features=false, features=["actix-web"]}
scraper = {version="0.18.1", default-features = false}
@ -25,7 +25,7 @@ actix-cors = {version="0.6.4", default-features=false}
fake-useragent = {version="0.1.3", default-features=false}
env_logger = {version="0.11.1", default-features=false}
log = {version="0.4.20", default-features=false}
mlua = {version="0.9.1", features=["luajit", "vendored"], default-features=false}
mlua = {version="0.9.1", features=["luajit", "vendored", "serialize"], default-features=false, optional = true}
redis = {version="0.24.0", features=["tokio-comp","connection-manager"], default-features = false, optional = true}
blake3 = {version="1.5.0", default-features=false}
error-stack = {version="0.4.0", default-features=false, features=["std"]}
@ -80,7 +80,9 @@ rpath = false
strip = "debuginfo"
[features]
default = ["memory-cache"]
default = ["memory-cache", "lua-config" ]
lua-config = ["dep:mlua"]
json-config = []
dhat-heap = ["dep:dhat"]
memory-cache = ["dep:mini-moka"]
redis-cache = ["dep:redis","dep:base64"]

View File

@ -6,7 +6,7 @@
use mimalloc::MiMalloc;
use std::net::TcpListener;
use websurfx::{cache::cacher::create_cache, config::parser::Config, run};
use websurfx::{cache::cacher::create_cache, config::Config, run};
/// A dhat heap memory profiler
#[cfg(feature = "dhat-heap")]
@ -36,16 +36,16 @@ async fn main() -> std::io::Result<()> {
log::info!(
"started server on port {} and IP {}",
config.port,
config.binding_ip
config.server.port,
config.server.binding_ip
);
log::info!(
"Open http://{}:{}/ in your browser",
config.binding_ip,
config.port,
config.server.binding_ip,
config.server.port,
);
let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?;
let listener = TcpListener::bind((config.server.binding_ip.clone(), config.server.port.get()))?;
run(listener, config, cache)?.await
}

4
src/cache/cacher.rs vendored
View File

@ -11,7 +11,7 @@ use mini_moka::sync::ConcurrentCacheExt;
use std::time::Duration;
use tokio::sync::Mutex;
use crate::{config::parser::Config, models::aggregation_models::SearchResults};
use crate::{config::Config, models::aggregation_models::SearchResults};
use super::error::CacheError;
#[cfg(feature = "redis-cache")]
@ -397,7 +397,7 @@ impl Cacher for InMemoryCache {
InMemoryCache {
cache: MokaCache::builder()
.time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
.time_to_live(Duration::from_secs(config.caching.cache_expiry_time.into()))
.build(),
}
}

26
src/config/caching.rs Normal file
View File

@ -0,0 +1,26 @@
#![allow(missing_docs)]
use serde::Deserialize;
/// Stores configurations related to caching.
#[derive(Clone, Deserialize, Debug)]
#[serde(default, deny_unknown_fields)]
pub struct Caching {
    /// The expiry time of the search results from the cache (in seconds).
    /// Values below 60 are rejected at load time and replaced with 60.
    #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
    pub cache_expiry_time: u16,
    /// The URI to the redis server to use for caching
    /// (only compiled in with the `redis-cache` feature).
    #[cfg(feature = "redis-cache")]
    pub redis_url: String,
}
impl Default for Caching {
    /// Provides the default caching options: a 10 minute (600 second) result
    /// TTL and a redis client pointed at a local instance.
    fn default() -> Self {
        Caching {
            #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
            cache_expiry_time: 600,
            // The field is a `String`, so the literal must be converted;
            // a bare `&str` here does not compile.
            #[cfg(feature = "redis-cache")]
            redis_url: "redis://127.0.0.1:8082".to_string(),
        }
    }
}

43
src/config/json.rs Normal file
View File

@ -0,0 +1,43 @@
//! This module provides the functionality to parse the json config and convert the config options
//! into rust readable form.
use std::fs::File;
use std::io::BufReader;
use crate::config::{process_settings, Config};
use crate::handler::{file_path, FileType};
impl Config {
    /// A function which deserializes the config.json into a Config struct.
    ///
    /// # Arguments
    ///
    /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
    ///   initialized twice. Pass false if the logger has not yet been initialized.
    ///
    /// # Errors
    ///
    /// Returns an error if the config file cannot be opened or contains invalid JSON.
    /// A *missing* config file is not an error: the defaults are used instead.
    pub fn parse(logging_initialized: bool) -> Result<Self, Box<dyn std::error::Error>> {
        let config_file = match file_path(FileType::Config) {
            Ok(f) => f,
            Err(_) => {
                // No config file on disk: fall back to `Config::default()`.
                log::error!("Config Error: No config file found, falling back to defaults");
                let conf = Self::default();
                if !logging_initialized {
                    conf.set_logging_level();
                }
                return Ok(conf);
            }
        };
        let reader = BufReader::new(File::open(config_file)?);
        let mut conf: Config = serde_json::from_reader(reader)?;
        if !logging_initialized {
            conf.set_logging_level();
        }
        // Sanitize any out-of-range option values before handing the config out.
        conf = process_settings(conf)?;
        Ok(conf)
    }
}

43
src/config/lua.rs Normal file
View File

@ -0,0 +1,43 @@
//! This module provides the functionality to parse the lua config and convert the config options
//! into rust readable form.
use std::fs;
use mlua::{Lua, LuaSerdeExt};
use crate::config::{process_settings, Config};
use crate::handler::{file_path, FileType};
impl Config {
    /// Deserializes the `config.lua` file into a `Config` struct.
    ///
    /// # Arguments
    ///
    /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
    ///   initialized twice. Pass false if the logger has not yet been initialized.
    pub fn parse(logging_initialized: bool) -> Result<Self, Box<dyn std::error::Error>> {
        let interpreter = Lua::new();

        // When no config file can be located, fall back to the built-in defaults.
        let Ok(config_path) = file_path(FileType::Config) else {
            log::error!("Config Error: No config file found, falling back to defaults");
            let defaults = Self::default();
            if !logging_initialized {
                defaults.set_logging_level();
            }
            return Ok(defaults);
        };

        // Evaluate the lua source and convert the resulting value into a `Config`.
        let source = fs::read_to_string(config_path)?;
        let evaluated = interpreter.load(source).eval()?;
        let mut parsed: Config = interpreter.from_value(evaluated)?;

        if !logging_initialized {
            parsed.set_logging_level();
        }

        // Sanitize any out-of-range option values before handing the config out.
        Ok(process_settings(parsed)?)
    }
}

View File

@ -1,4 +1,72 @@
//! This module provides the modules which handles the functionality to parse the lua config
#![allow(missing_docs, clippy::missing_docs_in_private_items)]
//! This module provides the modules which handles the functionality to parse the lua/json config
//! and convert the config options into rust readable form.
use crate::config::{caching::Caching, search::Search, server::Server, style::Style};
use log::LevelFilter;
use serde::Deserialize;
pub mod parser;
pub mod caching;
pub mod search;
pub mod server;
pub mod style;
/// A named struct which stores the parsed config file options.
#[derive(Clone, Deserialize, Debug, Default)]
#[serde(default, deny_unknown_fields)]
pub struct Config {
    /// Configuration options for the server (port, threads, rate limiting, ...).
    pub server: Server,
    /// Configuration options for the website's look and feel.
    pub style: Style,
    /// Configuration options related to result caching.
    pub caching: Caching,
    /// Configuration options for searching (enabled engines, safe search level).
    pub search: Search,
}
impl Config {
/// a helper function that sets the proper logging level
fn set_logging_level(&self) {
if let Ok(pkg_env_var) = std::env::var("PKG_ENV") {
if pkg_env_var.to_lowercase() == "dev" {
env_logger::Builder::new()
.filter(None, LevelFilter::Trace)
.init();
return;
}
}
// Initializing logging middleware with level set to default or info.
let log_level = match (self.server.debug, self.server.logging) {
(true, true) => LevelFilter::Debug,
(true, false) => LevelFilter::Debug,
(false, true) => LevelFilter::Info,
(false, false) => LevelFilter::Error,
};
env_logger::Builder::new().filter(None, log_level).init();
}
}
fn process_settings(mut conf: Config) -> Result<Config, Box<dyn std::error::Error>> {
conf.search.safe_search = match conf.search.safe_search {
0..=4 => conf.search.safe_search,
_ => {
log::error!("Config Error: The value of `safe_search` option should be a non zero positive integer from 0 to 4.");
log::error!("Falling back to using the value `1` for the option");
1
}
};
conf.caching.cache_expiry_time = match conf.caching.cache_expiry_time {
0..=59 => {
log::error!("Config Error: The value of `cache_expiry_time` must be greater than 60");
log::error!("Falling back to using the value `60` for the option");
60
}
_ => conf.caching.cache_expiry_time,
};
Ok(conf)
}
#[cfg(feature = "json-config")]
pub mod json;
#[cfg(feature = "lua-config")]
pub mod lua;

View File

@ -1,168 +0,0 @@
//! This module provides the functionality to parse the lua config and convert the config options
//! into rust readable form.
use crate::handler::{file_path, FileType};
use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
use log::LevelFilter;
use mlua::Lua;
use std::{collections::HashMap, fs, thread::available_parallelism};
/// A named struct which stores the parsed config file options.
#[derive(Clone)]
pub struct Config {
    /// It stores the parsed port number option on which the server should launch.
    pub port: u16,
    /// It stores the parsed ip address option on which the server should launch.
    pub binding_ip: String,
    /// It stores the theming options for the website.
    pub style: Style,
    #[cfg(feature = "redis-cache")]
    /// It stores the redis connection url address on which the redis
    /// client should connect.
    pub redis_url: String,
    #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
    /// It stores the max TTL for search results in cache (in seconds).
    pub cache_expiry_time: u16,
    /// It stores the option to whether enable or disable production use.
    pub aggregator: AggregatorConfig,
    /// It stores the option to whether enable or disable logs.
    pub logging: bool,
    /// It stores the option to whether enable or disable debug mode.
    pub debug: bool,
    /// It stores all the engine names that were enabled by the user.
    pub upstream_search_engines: HashMap<String, bool>,
    /// It stores the time (secs) which controls the server request timeout.
    pub request_timeout: u8,
    /// It stores the number of threads which controls the app will use to run.
    pub threads: u8,
    /// It stores configuration options for the ratelimiting middleware.
    pub rate_limiter: RateLimiter,
    /// It stores the level of safe search to be used for restricting content in the
    /// search results (0 = none .. 4 = aggressive).
    pub safe_search: u8,
}
impl Config {
    /// A function which parses the config.lua file and puts all the parsed options in the newly
    /// constructed Config struct and returns it.
    ///
    /// # Arguments
    ///
    /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
    ///   initialized twice. Pass false if the logger has not yet been initialized.
    ///
    /// # Error
    ///
    /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error
    /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
    /// Config struct with all the parsed config options from the parsed config file.
    pub fn parse(logging_initialized: bool) -> Result<Self, Box<dyn std::error::Error>> {
        let lua = Lua::new();
        let globals = lua.globals();

        // Execute config.lua; its top-level assignments become lua globals read below.
        lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
            .exec()?;

        let parsed_threads: u8 = globals.get::<_, u8>("threads")?;

        let debug: bool = globals.get::<_, bool>("debug")?;
        let logging: bool = globals.get::<_, bool>("logging")?;

        if !logging_initialized {
            set_logging_level(debug, logging);
        }

        // `threads = 0` is invalid; fall back to half of the available cores.
        let threads: u8 = if parsed_threads == 0 {
            let total_num_of_threads: usize = available_parallelism()?.get() / 2;
            log::error!(
                "Config Error: The value of `threads` option should be a non zero positive integer"
            );
            log::error!("Falling back to using {} threads", total_num_of_threads);
            total_num_of_threads as u8
        } else {
            parsed_threads
        };

        let rate_limiter = globals.get::<_, HashMap<String, u8>>("rate_limiter")?;

        // `safe_search` must be one of the supported levels (0 through 4).
        let parsed_safe_search: u8 = globals.get::<_, u8>("safe_search")?;
        let safe_search: u8 = match parsed_safe_search {
            0..=4 => parsed_safe_search,
            _ => {
                log::error!("Config Error: The value of `safe_search` option should be a non zero positive integer from 0 to 4.");
                log::error!("Falling back to using the value `1` for the option");
                1
            }
        };

        // Cache TTLs shorter than 60 seconds are rejected and replaced with 60.
        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
        let parsed_cet = globals.get::<_, u16>("cache_expiry_time")?;
        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
        let cache_expiry_time = match parsed_cet {
            0..=59 => {
                log::error!(
                    "Config Error: The value of `cache_expiry_time` must be greater than 60"
                );
                log::error!("Falling back to using the value `60` for the option");
                60
            }
            _ => parsed_cet,
        };

        // Every remaining option is read straight from the lua globals; a missing
        // or wrongly-typed option surfaces here as a lua conversion error.
        Ok(Config {
            port: globals.get::<_, u16>("port")?,
            binding_ip: globals.get::<_, String>("binding_ip")?,
            style: Style::new(
                globals.get::<_, String>("theme")?,
                globals.get::<_, String>("colorscheme")?,
                globals.get::<_, Option<String>>("animation")?,
            ),
            #[cfg(feature = "redis-cache")]
            redis_url: globals.get::<_, String>("redis_url")?,
            aggregator: AggregatorConfig {
                random_delay: globals.get::<_, bool>("production_use")?,
            },
            logging,
            debug,
            upstream_search_engines: globals
                .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
            request_timeout: globals.get::<_, u8>("request_timeout")?,
            threads,
            rate_limiter: RateLimiter {
                number_of_requests: rate_limiter["number_of_requests"],
                time_limit: rate_limiter["time_limit"],
            },
            safe_search,
            #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
            cache_expiry_time,
        })
    }
}
/// a helper function that sets the proper logging level
///
/// # Arguments
///
/// * `debug` - It takes the option to whether enable or disable debug mode.
/// * `logging` - It takes the option to whether enable or disable logs.
fn set_logging_level(debug: bool, logging: bool) {
if let Ok(pkg_env_var) = std::env::var("PKG_ENV") {
if pkg_env_var.to_lowercase() == "dev" {
env_logger::Builder::new()
.filter(None, LevelFilter::Trace)
.init();
return;
}
}
// Initializing logging middleware with level set to default or info.
let log_level = match (debug, logging) {
(true, true) => LevelFilter::Debug,
(true, false) => LevelFilter::Debug,
(false, true) => LevelFilter::Info,
(false, false) => LevelFilter::Error,
};
env_logger::Builder::new().filter(None, log_level).init();
}

37
src/config/search.rs Normal file
View File

@ -0,0 +1,37 @@
#![allow(missing_docs)]
use serde::Deserialize;
use std::collections::HashMap;
/// Stores configurations related to searching (enabled engines and safe search level).
#[derive(Clone, Deserialize, Debug)]
#[serde(default, deny_unknown_fields)]
pub struct Search {
    /// The search engines to enable/disable, keyed by engine name.
    pub upstream_search_engines: HashMap<String, bool>,
    /// The safe search level to set
    /// * 0 - None
    /// * 1 - Low
    /// * 2 - Moderate
    /// * 3 - High
    /// * 4 - Aggressive
    pub safe_search: u8,
}
impl Default for Search {
fn default() -> Self {
Search {
upstream_search_engines: {
let mut map = HashMap::new();
map.insert("DuckDuckGo".to_string(), true);
map.insert("Searx".to_string(), false);
map.insert("Brave".to_string(), false);
map.insert("Startpage".to_string(), false);
map.insert("LibreX".to_string(), false);
map.insert("Mojeek".to_string(), false);
map.insert("Bing".to_string(), false);
map
},
safe_search: 2,
}
}
}

69
src/config/server.rs Normal file
View File

@ -0,0 +1,69 @@
#![allow(missing_docs)]
use serde::Deserialize;
use std::num::NonZeroU16;
/// Configuration options for the server.
#[derive(Clone, Debug, Deserialize)]
#[serde(default, deny_unknown_fields)]
pub struct Server {
    /// Whether to create logs.
    pub logging: bool,
    /// Whether to use debug mode.
    pub debug: bool,
    /// The amount of threads to utilize.
    pub threads: NonZeroU16,
    /// The port websurfx will listen on.
    pub port: NonZeroU16,
    /// The IP address websurfx will listen on.
    pub binding_ip: String,
    /// Configuration options for the search result aggregator.
    pub aggregator: Aggregator,
    /// Timeout for the search requests sent to the upstream search engines (in seconds).
    pub request_timeout: u8,
    /// Configuration options for the rate limiter middleware.
    pub rate_limiter: RateLimiter,
}
impl Default for Server {
    /// Provides the default server options: 10 worker threads listening on
    /// 127.0.0.1:8080 with logging enabled and debug mode disabled.
    fn default() -> Self {
        // The literals are non-zero, so these `unwrap`s can never panic.
        let default_threads = NonZeroU16::new(10).unwrap();
        let default_port = NonZeroU16::new(8080).unwrap();
        Server {
            binding_ip: String::from("127.0.0.1"),
            port: default_port,
            threads: default_threads,
            logging: true,
            debug: false,
            aggregator: Aggregator::default(),
            request_timeout: 30,
            rate_limiter: RateLimiter::default(),
        }
    }
}
/// Configuration options for the aggregator.
#[derive(Clone, Deserialize, Default, Debug)]
#[serde(default, deny_unknown_fields)]
pub struct Aggregator {
    /// Whether to add a random delay before sending each request to the search engines.
    /// Enabling this option is recommended for instances with multiple users, as it
    /// prevents DDoSing the upstream search engines with a large number of
    /// simultaneous requests.
    pub random_delay: bool,
}
/// Configuration options for the rate limiter middleware.
#[derive(Clone, Deserialize, Debug)]
#[serde(default, deny_unknown_fields)]
pub struct RateLimiter {
    /// The number of requests that are allowed within the provided time limit.
    pub number_of_requests: u8,
    /// The length of the time window (in seconds) within which at most
    /// `number_of_requests` requests are accepted.
    pub time_limit: u8,
}
impl Default for RateLimiter {
fn default() -> Self {
RateLimiter {
number_of_requests: 20,
time_limit: 3,
}
}
}

25
src/config/style.rs Normal file
View File

@ -0,0 +1,25 @@
#![allow(missing_docs)]
use serde::Deserialize;
/// Stores configurations related to the style of the UI.
#[derive(Clone, Deserialize, Debug)]
#[serde(default, deny_unknown_fields)]
pub struct Style {
    /// The theme to use for the website.
    pub theme: String,
    /// The colorscheme to use for the theme.
    pub colorscheme: String,
    /// The animation to use for the theme, or `None` to disable animations.
    pub animation: Option<String>,
}
impl Default for Style {
fn default() -> Self {
Style {
theme: "simple".to_string(),
colorscheme: "catppuccin-mocha".to_string(),
animation: Some("simple-frosted-glow".to_string()),
}
}
}

View File

@ -12,7 +12,10 @@ const PUBLIC_DIRECTORY_NAME: &str = "public";
/// The constant holding the name of the common folder.
const COMMON_DIRECTORY_NAME: &str = "websurfx";
/// The constant holding the name of the config file.
#[cfg(feature = "lua-config")]
const CONFIG_FILE_NAME: &str = "config.lua";
#[cfg(feature = "json-config")]
const CONFIG_FILE_NAME: &str = "config.json";
/// The constant holding the name of the AllowList text file.
const ALLOWLIST_FILE_NAME: &str = "allowlist.txt";
/// The constant holding the name of the BlockList text file.

View File

@ -5,6 +5,11 @@
#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)]
#![warn(clippy::cognitive_complexity, rust_2018_idioms)]
#[cfg(all(feature = "lua-config", feature = "json-config"))]
compile_error!(
r#"feature "lua-config" and feature "json-config" cannot be enabled at the same time, please disable the "lua-config" feature to use the "json-config" feature"#
);
pub mod cache;
pub mod config;
pub mod engines;
@ -28,7 +33,7 @@ use actix_web::{
web, App, HttpServer,
};
use cache::cacher::{Cacher, SharedCache};
use config::parser::Config;
use config::Config;
use handler::{file_path, FileType};
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
@ -45,7 +50,7 @@ use handler::{file_path, FileType};
///
/// ```rust
/// use std::net::TcpListener;
/// use websurfx::{config::parser::Config, run, cache::cacher::create_cache};
/// use websurfx::{config::Config, run, cache::cacher::create_cache};
///
/// #[tokio::main]
/// async fn main(){
@ -62,7 +67,7 @@ pub fn run(
) -> std::io::Result<Server> {
let public_folder_path: &str = file_path(FileType::Theme)?;
let cloned_config_threads_opt: u8 = config.threads;
let cloned_config_threads_opt = config.server.threads;
let cache = web::Data::new(SharedCache::new(cache));
@ -86,8 +91,8 @@ pub fn run(
.wrap(cors)
.wrap(Governor::new(
&GovernorConfigBuilder::default()
.per_second(config.rate_limiter.time_limit as u64)
.burst_size(config.rate_limiter.number_of_requests as u32)
.per_second(config.server.rate_limiter.time_limit as u64)
.burst_size(config.server.rate_limiter.number_of_requests as u32)
.finish()
.unwrap(),
))
@ -107,7 +112,7 @@ pub fn run(
.service(router::settings) // settings page
.default_service(web::route().to(router::not_found)) // error page
})
.workers(cloned_config_threads_opt as usize)
.workers(cloned_config_threads_opt.get().into())
// Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
.listen(listener)?
.run();

View File

@ -4,5 +4,4 @@
pub mod aggregation_models;
pub mod engine_models;
pub mod parser_models;
pub mod server_models;

View File

@ -1,57 +0,0 @@
//! This module provides public models for handling, storing and serializing parsed config file
//! options from config.lua by grouping them together.
/// A named struct which stores,deserializes, serializes and groups the parsed config file options
/// of theme and colorscheme names into the Style struct which derives the `Clone`, `Serialize`
/// and Deserialize traits where the `Clone` trait is derived for allowing the struct to be
/// cloned and passed to the server as a shared data between all routes except `/robots.txt` and
/// the `Serialize` trait has been derived for allowing the object to be serialized so that it
/// can be passed to handlebars template files and the `Deserialize` trait has been derived in
/// order to allow the deserializing the json back to struct in aggregate function in
/// aggregator.rs and create a new struct out of it and then serialize it back to json and pass
/// it to the template files.
#[derive(Clone, Default)]
pub struct Style {
/// It stores the parsed theme option used to set a theme for the website.
pub theme: String,
/// It stores the parsed colorscheme option used to set a colorscheme for the
/// theme being used.
pub colorscheme: String,
/// It stores the parsed animation option used to set an animation for the
/// theme being used.
pub animation: Option<String>,
}
impl Style {
/// Constructs a new `Style` with the given arguments needed for the struct.
///
/// # Arguments
///
/// * `theme` - It takes the parsed theme option used to set a theme for the website.
/// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
/// for the theme being used.
pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
Style {
theme,
colorscheme,
animation,
}
}
}
/// Configuration options for the aggregator.
#[derive(Clone)]
pub struct AggregatorConfig {
/// It stores the option to whether enable or disable random delays between
/// requests.
pub random_delay: bool,
}
/// Configuration options for the rate limiter middleware.
#[derive(Clone)]
pub struct RateLimiter {
/// The number of request that are allowed within a provided time limit.
pub number_of_requests: u8,
/// The time limit in which the quantity of requests that should be accepted.
pub time_limit: u8,
}

View File

@ -4,7 +4,7 @@ use std::borrow::Cow;
use serde::Deserialize;
use super::parser_models::Style;
use crate::config::style::Style;
/// A named struct which deserializes all the user provided search parameters and stores them.
#[derive(Deserialize)]

View File

@ -3,7 +3,7 @@
//! when requested.
use crate::{
config::parser::Config,
config::Config,
handler::{file_path, FileType},
};
use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
@ -67,11 +67,11 @@ pub async fn settings(
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
crate::templates::views::settings::settings(
config.safe_search,
config.search.safe_search,
&config.style.colorscheme,
&config.style.theme,
&config.style.animation,
&config.upstream_search_engines,
&config.search.upstream_search_engines,
)?
.0,
))

View File

@ -2,7 +2,7 @@
use crate::{
cache::cacher::SharedCache,
config::parser::Config,
config::Config,
handler::{file_path, FileType},
models::{
aggregation_models::SearchResults,
@ -59,20 +59,21 @@ pub async fn search(
server_models::Cookie::build(
&config.style,
config
.search
.upstream_search_engines
.iter()
.filter_map(|(engine, enabled)| {
enabled.then_some(Cow::Borrowed(engine.as_str()))
})
.collect(),
config.safe_search,
config.search.safe_search,
)
});
search_settings.safe_search_level = get_safesearch_level(
&Some(search_settings.safe_search_level),
&params.safesearch,
config.safe_search,
config.search.safe_search,
);
// Closure wrapping the results function capturing local references
@ -169,8 +170,8 @@ async fn results(
let cache_key = format!(
"http://{}:{}/search?q={}&page={}&safesearch={}&engines={}",
config.binding_ip,
config.port,
config.server.binding_ip,
config.server.port,
query,
page,
safe_search_level,
@ -209,14 +210,14 @@ async fn results(
aggregate(
query,
page,
config.aggregator.random_delay,
config.debug,
config.server.aggregator.random_delay,
config.server.debug,
&search_settings
.engines
.iter()
.filter_map(|engine| EngineHandler::new(engine).ok())
.collect::<Vec<EngineHandler>>(),
config.request_timeout,
config.server.request_timeout,
safe_search_level,
)
.await?

View File

@ -1,6 +1,6 @@
use std::net::TcpListener;
use websurfx::{config::parser::Config, run, templates::views};
use websurfx::{config::Config, run, templates::views};
// Starts a new instance of the HTTP server, bound to a random available port
async fn spawn_app() -> String {

View File

@ -1,69 +1,41 @@
-- ### General ###
logging = true -- an option to enable or disable logs.
debug = false -- an option to enable or disable debug mode.
threads = 10 -- the amount of threads that the app will use to run (the value should be greater than 0).
{
server = {
logging = true,
debug = false,
threads = 10,
port = 8080,
binding_ip = "127.0.0.1",
aggregator = {
random_delay = false
},
request_timeout = 30,
rate_limiter = {
number_of_requests = 20,
time_limit = 3
}
},
-- ### Server ###
port = "8080" -- port on which server should be launched
binding_ip = "127.0.0.1" --ip address on the which server should be launched.
production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one))
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
rate_limiter = {
number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
caching = {
-- redis_url = "redis://127.0.0.1:8082",
cache_expiry_time = 600
},
search = {
upstream_search_engines = {
DuckDuckGo = true,
Searx = false,
Brave = false,
Startpage = false,
LibreX = false,
Mojeek = false,
Bing = false,
},
safe_search = 2
},
style = {
colorscheme = "catppuccin-mocha",
theme = "simple",
animation = "simple-frosted-glow"
}
}
-- ### Search ###
-- Filter results based on different levels. The levels provided are:
-- {{
-- 0 - None
-- 1 - Low
-- 2 - Moderate
-- 3 - High
-- 4 - Aggressive
-- }}
safe_search = 2
-- ### Website ###
-- The different colorschemes provided are:
-- {{
-- catppuccin-mocha
-- dark-chocolate
-- dracula
-- gruvbox-dark
-- monokai
-- nord
-- oceanic-next
-- one-dark
-- solarized-dark
-- solarized-light
-- tokyo-night
-- tomorrow-night
-- }}
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
-- The different themes provided are:
-- {{
-- simple
-- }}
theme = "simple" -- the theme name which should be used for the website
-- The different animations provided are:
-- {{
-- simple-frosted-glow
-- }}
animation = "simple-frosted-glow" -- the animation name which should be used with the theme or `nil` if you don't want any animations.
-- ### Caching ###
redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
cache_expiry_time = 600 -- This option takes the expiry time of the search results (value in seconds and the value should be greater than or equal to 60 seconds).
-- ### Search Engines ###
upstream_search_engines = {
DuckDuckGo = true,
Searx = false,
Brave = false,
Startpage = false,
LibreX = false,
Mojeek = false,
Bing = false,
} -- select the upstream search engines from which the results should be fetched.