Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-22 05:58:21 -05:00)

Compare commits: 9fa0c0a790 ... f6373bcd83 (18 commits)
Commits in this range:

f6373bcd83
99fe3da258
89d367ff99
193b4e36db
e582960402
1883093dc7
98e817c0a8
28231d99b8
4afc07f1d5
f2d2068bcf
b1bcf41061
39af9096ef
4bb6c5e90b
494ff27b49
745197152b
ed661174ba
f6cf1ce5f5
07e1f663df
Cargo.lock (generated): 1171 changes. File diff suppressed because it is too large.
Cargo.toml: 23 changes
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.17.20"
+version = "1.17.22"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -17,7 +17,7 @@ reqwest = { version = "0.12.5", default-features = false, features = [
     "rustls-tls",
     "brotli",
     "gzip",
-    "http2"
+    "http2",
 ] }
 tokio = { version = "1.32.0", features = [
     "rt-multi-thread",
@@ -27,6 +27,7 @@ tokio = { version = "1.32.0", features = [
 ], default-features = false }
 serde = { version = "1.0.209", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0.122", default-features = false }
+bincode = {version="1.3.3", default-features=false}
 maud = { version = "0.26.0", default-features = false, features = [
     "actix-web",
 ] }
@@ -48,6 +49,7 @@ mlua = { version = "0.9.9", features = [
 redis = { version = "0.25.4", features = [
     "tokio-comp",
     "connection-manager",
+    "tcp_nodelay"
 ], default-features = false, optional = true }
 blake3 = { version = "1.5.4", default-features = false }
 error-stack = { version = "0.4.0", default-features = false, features = [
@@ -55,17 +57,13 @@ error-stack = { version = "0.4.0", default-features = false, features = [
 ] }
 async-trait = { version = "0.1.80", default-features = false }
 regex = { version = "1.9.4", features = ["perf"], default-features = false }
-smallvec = { version = "1.13.1", features = [
-    "union",
-    "serde",
-], default-features = false }
 futures = { version = "0.3.30", default-features = false, features = ["alloc"] }
 dhat = { version = "0.3.2", optional = true, default-features = false }
 mimalloc = { version = "0.1.43", default-features = false }
 async-once-cell = { version = "0.5.3", default-features = false }
 actix-governor = { version = "0.5.0", default-features = false }
-mini-moka = { version = "0.10", optional = true, default-features = false, features = [
-    "sync",
+moka = { version = "0.12.8", optional = true, default-features = false, features = [
+    "future",
 ] }
 async-compression = { version = "0.4.12", default-features = false, features = [
     "brotli",
@@ -82,8 +80,8 @@ base64 = { version = "0.21.5", default-features = false, features = [
 cfg-if = { version = "1.0.0", default-features = false, optional = true }
 keyword_extraction = { version = "1.4.3", default-features = false, features = [
     "tf_idf",
+    "rayon",
 ] }
-
 stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
 thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
     "moby",
@@ -104,8 +102,6 @@ lightningcss = { version = "1.0.0-alpha.57", default-features = false, features
 # Temporary fork with fix
 minify-js = { git = "https://github.com/RuairidhWilliamson/minify-js", branch = "master", version = "0.6.0", default-features = false}
-
-
 
 [profile.dev]
 opt-level = 0
 debug = true
@@ -178,12 +174,13 @@ opt-level = "z"
 
 [features]
 use-synonyms-search = ["thesaurus/static"]
-default = ["memory-cache"]
+default = ["memory-cache", "socks"]
 dhat-heap = ["dep:dhat"]
-memory-cache = ["dep:mini-moka"]
+memory-cache = ["dep:moka"]
 redis-cache = ["dep:redis", "dep:base64"]
 compress-cache-results = ["dep:async-compression", "dep:cfg-if"]
 encrypt-cache-results = ["dep:chacha20poly1305", "dep:chacha20"]
 cec-cache-results = ["compress-cache-results", "encrypt-cache-results"]
 experimental-io-uring = ["actix-web/experimental-io-uring"]
 use-non-static-synonyms-search = ["thesaurus"]
+socks = ["reqwest/socks"]
```
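Taken together, the manifest changes preview the code changes that follow: bincode arrives for binary cache serialization, mini-moka's sync cache gives way to moka's future-aware cache, smallvec is dropped in favour of plain Vec and Box<[T]>, redis gains tcp_nodelay, keyword_extraction appears to pick up a rayon feature, and the new socks feature (added to the default set) forwards to reqwest/socks in support of the proxy option introduced in this release.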
src/cache/cacher.rs (vendored): 41 changes
```diff
@@ -2,10 +2,9 @@
 //! from the upstream search engines in a json format.
 
 use error_stack::Report;
+use futures::future::join_all;
 #[cfg(feature = "memory-cache")]
-use mini_moka::sync::Cache as MokaCache;
-#[cfg(feature = "memory-cache")]
-use mini_moka::sync::ConcurrentCacheExt;
+use moka::future::Cache as MokaCache;
 
 #[cfg(feature = "memory-cache")]
 use std::time::Duration;
@@ -376,13 +375,13 @@ impl Cacher for RedisCache {
     }
 }
 /// TryInto implementation for SearchResults from Vec<u8>
-use std::convert::TryInto;
+use std::{convert::TryInto, sync::Arc};
 
 impl TryInto<SearchResults> for Vec<u8> {
     type Error = CacheError;
 
     fn try_into(self) -> Result<SearchResults, Self::Error> {
-        serde_json::from_slice(&self).map_err(|_| CacheError::SerializationError)
+        bincode::deserialize_from(self.as_slice()).map_err(|_| CacheError::SerializationError)
     }
 }
 
@@ -390,7 +389,7 @@ impl TryInto<Vec<u8>> for &SearchResults {
     type Error = CacheError;
 
     fn try_into(self) -> Result<Vec<u8>, Self::Error> {
-        serde_json::to_vec(self).map_err(|_| CacheError::SerializationError)
+        bincode::serialize(self).map_err(|_| CacheError::SerializationError)
     }
 }
 
```
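For orientation, here is a minimal, self-contained sketch of the bincode round-trip the two TryInto impls above now perform. Point and main are illustrative stand-ins, not project code; the real payload type is SearchResults:

```rust
use serde::{Deserialize, Serialize};

// Stand-in for the serde-derived `SearchResults` payload.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() -> Result<(), bincode::Error> {
    let original = Point { x: 3, y: 7 };
    // Compact binary encoding; this replaces serde_json::to_vec above.
    let bytes: Vec<u8> = bincode::serialize(&original)?;
    // deserialize_from accepts any io::Read, here the byte slice itself,
    // mirroring bincode::deserialize_from(self.as_slice()) above.
    let decoded: Point = bincode::deserialize_from(bytes.as_slice())?;
    assert_eq!(original, decoded);
    Ok(())
}
```

Unlike JSON, the bincode encoding is not self-describing, so entries written by the old serde_json path (for example in a persistent Redis cache) would presumably fail to decode and surface as CacheError::SerializationError.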
```diff
@@ -398,7 +397,16 @@ impl TryInto<Vec<u8>> for &SearchResults {
 #[cfg(feature = "memory-cache")]
 pub struct InMemoryCache {
     /// The backend cache which stores data.
-    cache: MokaCache<String, Vec<u8>>,
+    cache: Arc<MokaCache<String, Vec<u8>>>,
 }
 
+#[cfg(feature = "memory-cache")]
+impl Clone for InMemoryCache {
+    fn clone(&self) -> Self {
+        Self {
+            cache: self.cache.clone(),
+        }
+    }
+}
+
 #[cfg(feature = "memory-cache")]
@@ -408,15 +416,17 @@ impl Cacher for InMemoryCache {
         log::info!("Initialising in-memory cache");
 
         InMemoryCache {
-            cache: MokaCache::builder()
-                .time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
-                .build(),
+            cache: Arc::new(
+                MokaCache::builder()
+                    .time_to_live(Duration::from_secs(config.cache_expiry_time.into()))
+                    .build(),
+            ),
         }
     }
 
     async fn cached_results(&mut self, url: &str) -> Result<SearchResults, Report<CacheError>> {
         let hashed_url_string = self.hash_url(url);
-        match self.cache.get(&hashed_url_string) {
+        match self.cache.get(&hashed_url_string).await {
             Some(res) => self.post_process_search_results(res).await,
             None => Err(Report::new(CacheError::MissingValue)),
         }
@@ -427,13 +437,18 @@ impl Cacher for InMemoryCache {
         search_results: &[SearchResults],
         urls: &[String],
     ) -> Result<(), Report<CacheError>> {
+        let mut tasks: Vec<_> = Vec::with_capacity(urls.len());
         for (url, search_result) in urls.iter().zip(search_results.iter()) {
             let hashed_url_string = self.hash_url(url);
             let bytes = self.pre_process_search_results(search_result).await?;
-            self.cache.insert(hashed_url_string, bytes);
+            let new_self = self.clone();
+            tasks.push(tokio::spawn(async move {
+                new_self.cache.insert(hashed_url_string, bytes).await
+            }));
         }
 
-        self.cache.sync();
+        join_all(tasks).await;
 
         Ok(())
     }
 }
```
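A minimal sketch of the caching pattern adopted above, assuming moka 0.12's future API (keys and values here are illustrative, not project code): the cache sits behind an Arc so clones are cheap handle copies, every insert is awaited on its own spawned task, and join_all takes over the role of mini-moka's explicit sync():

```rust
use futures::future::join_all;
use moka::future::Cache;
use std::{sync::Arc, time::Duration};

#[tokio::main]
async fn main() {
    let cache: Arc<Cache<String, Vec<u8>>> = Arc::new(
        Cache::builder()
            .time_to_live(Duration::from_secs(60))
            .build(),
    );

    let mut tasks = Vec::new();
    for i in 0..4u8 {
        let cache = Arc::clone(&cache);
        // moka's future API makes insert itself async, hence the .await.
        tasks.push(tokio::spawn(async move {
            cache.insert(format!("key-{i}"), vec![i]).await;
        }));
    }
    join_all(tasks).await;

    assert_eq!(cache.get("key-0").await, Some(vec![0]));
}
```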
src/cache/redis_cacher.rs (vendored): 6 changes
```diff
@@ -16,7 +16,7 @@ const REDIS_PIPELINE_SIZE: usize = 3;
 /// connect to.
 pub struct RedisCache {
     /// It stores a pool of connections ready to be used.
-    connection_pool: Vec<ConnectionManager>,
+    connection_pool: Box<[ConnectionManager]>,
     /// It stores the size of the connection pool (in other words the number of
     /// connections that should be stored in the pool).
     pool_size: u8,
@@ -58,13 +58,13 @@ impl RedisCache {
         }));
     }
 
-    let mut outputs = Vec::new();
+    let mut outputs = Vec::with_capacity(tasks.len());
     for task in tasks {
         outputs.push(task.await??);
     }
 
     let redis_cache = RedisCache {
-        connection_pool: outputs,
+        connection_pool: outputs.into_boxed_slice(),
         pool_size,
         current_connection: Default::default(),
         cache_ttl,
```
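A small standard-library-only sketch of the container change above: Vec::with_capacity avoids reallocation while the pool is filled, and into_boxed_slice then freezes it into a fixed-size allocation that can never grow.

```rust
fn main() {
    let tasks = ["a", "b", "c"];
    // Pre-size the Vec so pushes never reallocate.
    let mut outputs: Vec<String> = Vec::with_capacity(tasks.len());
    for task in tasks {
        outputs.push(task.to_owned());
    }
    // Freeze into Box<[T]>: drops the capacity field and forbids growth.
    let pool: Box<[String]> = outputs.into_boxed_slice();
    assert_eq!(pool.len(), 3);
}
```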
src/config/parser.rs (filename inferred from hunk contents):

```diff
@@ -6,6 +6,7 @@ use crate::handler::{file_path, FileType};
 use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
 use log::LevelFilter;
 use mlua::Lua;
+use reqwest::Proxy;
 use std::{collections::HashMap, fs, thread::available_parallelism};
 
 /// A named struct which stores the parsed config file options.
@@ -48,6 +49,9 @@ pub struct Config {
     pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
+
+    /// Url of the proxy to use for outgoing requests.
+    pub proxy: Option<Proxy>,
 }
 
 impl Config {
@@ -118,6 +122,15 @@ impl Config {
         _ => parsed_cet,
     };
 
+    let proxy_str = globals.get::<_, String>("proxy")?;
+    let proxy = match Proxy::all(proxy_str) {
+        Ok(proxy) => Some(proxy),
+        Err(_) => {
+            log::error!("Invalid proxy url, defaulting to no proxy.");
+            None
+        }
+    };
+
     Ok(Config {
         port: globals.get::<_, u16>("port")?,
         binding_ip: globals.get::<_, String>("binding_ip")?,
@@ -148,6 +161,7 @@ impl Config {
         safe_search,
         #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
         cache_expiry_time,
+        proxy,
     })
     }
 }
```
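A hedged, standalone sketch of the fallback this parser change introduces (eprintln! stands in for log::error!, and the Lua plumbing is omitted): an unparsable proxy string is logged and degrades to None instead of failing startup.

```rust
use reqwest::Proxy;

// Mirrors the match in the Config parser above, minus the Lua plumbing.
fn parse_proxy(proxy_str: &str) -> Option<Proxy> {
    match Proxy::all(proxy_str) {
        Ok(proxy) => Some(proxy),
        Err(_) => {
            eprintln!("Invalid proxy url, defaulting to no proxy.");
            None
        }
    }
}

fn main() {
    // A well-formed URL parses into a proxy applied to all traffic.
    assert!(parse_proxy("http://127.0.0.1:8080").is_some());
    // An empty string should fail to parse, leaving the proxy disabled.
    assert!(parse_proxy("").is_none());
}
```

socks5:// URLs should also be accepted here, given the new socks feature forwarding to reqwest/socks.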
src/models/aggregation_models.rs (filename inferred from hunk contents):

```diff
@@ -3,7 +3,6 @@
 
 use super::engine_models::EngineError;
 use serde::{Deserialize, Serialize};
-use smallvec::SmallVec;
 #[cfg(any(
     feature = "use-synonyms-search",
     feature = "use-non-static-synonyms-search"
@@ -23,7 +22,7 @@ pub struct SearchResult {
     /// The description of the search result.
     pub description: String,
     /// The names of the upstream engines from which this results were provided.
-    pub engine: SmallVec<[String; 0]>,
+    pub engine: Vec<String>,
     /// The td-tdf score of the result in regards to the title, url and description and the user's query
     pub relevance_score: f32,
 }
@@ -153,10 +152,10 @@ impl EngineErrorInfo {
 #[serde(rename_all = "camelCase")]
 pub struct SearchResults {
     /// Stores the individual serializable `SearchResult` struct into a vector of
-    pub results: Vec<SearchResult>,
+    pub results: Box<[SearchResult]>,
     /// Stores the information on which engines failed with their engine name
     /// and the type of error that caused it.
-    pub engine_errors_info: Vec<EngineErrorInfo>,
+    pub engine_errors_info: Box<[EngineErrorInfo]>,
     /// Stores the flag option which holds the check value that the following
     /// search query was disallowed when the safe search level set to 4 and it
     /// was present in the `Blocklist` file.
@@ -183,10 +182,10 @@ impl SearchResults {
     /// the search url.
     /// * `engine_errors_info` - Takes an array of structs which contains information regarding
     ///   which engines failed with their names, reason and their severity color name.
-    pub fn new(results: Vec<SearchResult>, engine_errors_info: &[EngineErrorInfo]) -> Self {
+    pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
         Self {
             results,
-            engine_errors_info: engine_errors_info.to_owned(),
+            engine_errors_info,
             disallowed: Default::default(),
             filtered: Default::default(),
             safe_search_level: Default::default(),
@@ -205,11 +204,11 @@ impl SearchResults {
     }
 
     /// A getter function that gets the value of `engine_errors_info`.
-    pub fn engine_errors_info(&mut self) -> Vec<EngineErrorInfo> {
+    pub fn engine_errors_info(&mut self) -> Box<[EngineErrorInfo]> {
         std::mem::take(&mut self.engine_errors_info)
     }
     /// A getter function that gets the value of `results`.
-    pub fn results(&mut self) -> Vec<SearchResult> {
+    pub fn results(&mut self) -> Box<[SearchResult]> {
         self.results.clone()
     }
 
```
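The engine_errors_info getter above keeps using std::mem::take after the switch; a tiny sketch of why that still compiles: Box<[T]>, like Vec<T>, implements Default, producing an empty boxed slice.

```rust
fn main() {
    let mut errors: Box<[String]> = vec!["timeout".to_owned()].into_boxed_slice();
    // take() swaps in Default::default(), i.e. an empty Box<[String]>.
    let taken = std::mem::take(&mut errors);
    assert_eq!(taken.len(), 1);
    assert!(errors.is_empty());
}
```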
```diff
@@ -254,27 +253,50 @@ fn calculate_tf_idf(
     let tf_idf = TfIdf::new(params);
     let tokener = Tokenizer::new(query, stop_words, Some(punctuation));
     let query_tokens = tokener.split_into_words();
-    let mut search_tokens = vec![];
-
-    for token in query_tokens {
-        #[cfg(any(
-            feature = "use-synonyms-search",
-            feature = "use-non-static-synonyms-search"
-        ))]
-        {
-            // find some synonyms and add them to the search (from wordnet or moby if feature is enabled)
-            let synonyms = synonyms(&token);
-            search_tokens.extend(synonyms)
-        }
-        search_tokens.push(token);
-    }
+    #[cfg(any(
+        feature = "use-synonyms-search",
+        feature = "use-non-static-synonyms-search"
+    ))]
+    let mut extra_tokens = vec![];
 
-    let mut total_score = 0.0f32;
-    for token in search_tokens.iter() {
-        total_score += tf_idf.get_score(token);
-    }
+    let total_score: f32 = query_tokens
+        .iter()
+        .map(|token| {
+            #[cfg(any(
+                feature = "use-synonyms-search",
+                feature = "use-non-static-synonyms-search"
+            ))]
+            {
+                // find some synonyms and add them to the search (from wordnet or moby if feature is enabled)
+                extra_tokens.extend(synonyms(token))
+            }
 
-    let result = total_score / (search_tokens.len() as f32);
+            tf_idf.get_score(token)
+        })
+        .sum();
+
+    #[cfg(not(any(
+        feature = "use-synonyms-search",
+        feature = "use-non-static-synonyms-search"
+    )))]
+    let result = total_score / (query_tokens.len() as f32);
+
+    #[cfg(any(
+        feature = "use-synonyms-search",
+        feature = "use-non-static-synonyms-search"
+    ))]
+    let extra_total_score: f32 = extra_tokens
+        .iter()
+        .map(|token| tf_idf.get_score(token))
+        .sum();
+
+    #[cfg(any(
+        feature = "use-synonyms-search",
+        feature = "use-non-static-synonyms-search"
+    ))]
+    let result =
+        (extra_total_score + total_score) / ((query_tokens.len() + extra_tokens.len()) as f32);
 
     f32::from(!result.is_nan()) * result
 }
```
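A simplified, feature-flag-free sketch of the reshaped scoring pass (score is a placeholder for tf_idf.get_score, the cfg-gated synonym expansion is omitted, and the NaN clamp is written as an explicit branch for clarity): the two accumulation loops collapse into one iterator pass, and the average is taken over the tokens actually scored.

```rust
// Placeholder scoring function standing in for tf_idf.get_score.
fn score(token: &str) -> f32 {
    token.len() as f32
}

fn average_score(query_tokens: &[String]) -> f32 {
    let total: f32 = query_tokens.iter().map(|t| score(t)).sum();
    let result = total / (query_tokens.len() as f32);
    // An empty token list divides by zero; clamp the resulting NaN to 0.
    if result.is_nan() {
        0.0
    } else {
        result
    }
}

fn main() {
    let tokens = vec!["rust".to_owned(), "search".to_owned()];
    assert_eq!(average_score(&tokens), (4.0 + 6.0) / 2.0);
    assert_eq!(average_score(&[]), 0.0);
}
```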
src/models/server_models.rs (filename inferred from hunk contents):

```diff
@@ -11,7 +11,7 @@ use super::parser_models::Style;
 pub struct SearchParams {
     /// It stores the search parameter option `q` (or query in simple words)
     /// of the search url.
-    pub q: Option<String>,
+    pub q: Option<Cow<'static, str>>,
     /// It stores the search parameter `page` (or pageno in simple words)
     /// of the search url.
     pub page: Option<u32>,
@@ -29,7 +29,7 @@ pub struct Cookie<'a> {
     /// It stores the colorscheme name used for the website theme.
     pub colorscheme: Cow<'a, str>,
     /// It stores the user selected upstream search engines selected from the UI.
-    pub engines: Cow<'a, Vec<Cow<'a, str>>>,
+    pub engines: Cow<'a, [Cow<'a, str>]>,
     /// It stores the user selected safe search level from the UI.
     pub safe_search_level: u8,
 }
```
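A short std-only sketch of what the Cookie change buys (engine names are illustrative): with Cow<'a, [T]> the borrowed variant can point at any slice, including a plain array, whereas Cow<'a, Vec<T>> could only ever borrow an existing Vec.

```rust
use std::borrow::Cow;

fn main() {
    // Borrowed variant: points at an existing slice, nothing is allocated.
    let names: [Cow<'static, str>; 2] = [Cow::Borrowed("duckduckgo"), Cow::Borrowed("mojeek")];
    let borrowed: Cow<'_, [Cow<'_, str>]> = Cow::Borrowed(&names);

    // Owned variant: holds a Vec, exactly as Cow<'a, Vec<...>> did before.
    let owned: Cow<'_, [Cow<'_, str>]> = Cow::Owned(vec![Cow::Borrowed("brave")]);

    assert_eq!(borrowed.len() + owned.len(), 3);
}
```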
src/results/aggregator.rs (filename inferred from hunk contents):

```diff
@@ -14,7 +14,6 @@ use futures::stream::FuturesUnordered;
 use regex::Regex;
 use reqwest::{Client, ClientBuilder};
-use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tokio::{
     fs::File,
     io::{AsyncBufReadExt, BufReader},
@@ -76,7 +75,7 @@ pub async fn aggregate(
     safe_search: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let client = CLIENT.get_or_init(|| {
-        ClientBuilder::new()
+        let mut cb = ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .pool_idle_timeout(Duration::from_secs(
                 config.pool_idle_connection_timeout as u64,
@@ -86,20 +85,17 @@ pub async fn aggregate(
             .https_only(true)
             .gzip(true)
             .brotli(true)
-            .http2_adaptive_window(config.adaptive_window)
-            .build()
-            .unwrap()
+            .http2_adaptive_window(config.adaptive_window);
+
+        if config.proxy.is_some() {
+            cb = cb.proxy(config.proxy.clone().unwrap());
+        }
+
+        cb.build().unwrap()
     });
 
     let user_agent: &str = random_user_agent();
 
-    // Add a random delay before making the request.
-    if config.aggregator.random_delay || !config.debug {
-        let nanos = SystemTime::now().duration_since(UNIX_EPOCH)?.subsec_nanos() as f32;
-        let delay = ((nanos / 1_0000_0000 as f32).floor() as u64) + 1;
-        tokio::time::sleep(Duration::from_secs(delay)).await;
-    }
-
     let mut names: Vec<&str> = Vec::with_capacity(0);
 
     // create tasks for upstream result fetching
```
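A minimal sketch of the builder pattern introduced above (build_client is a hypothetical helper, not project code): ClientBuilder methods consume and return the builder, so an optional step such as a proxy needs the builder rebound through a mutable binding rather than written as one long method chain. Note also that the random pre-request delay removed here reappears in the search route below.

```rust
use reqwest::{Client, ClientBuilder, Proxy};

// Hypothetical helper: applies a proxy only when one is configured.
fn build_client(proxy: Option<Proxy>) -> Client {
    let mut cb = ClientBuilder::new().https_only(true);
    if let Some(proxy) = proxy {
        cb = cb.proxy(proxy);
    }
    cb.build().expect("static client configuration should be valid")
}

fn main() {
    let _plain = build_client(None);
    let _proxied = build_client(Proxy::all("http://127.0.0.1:8080").ok());
}
```

The diff reaches the same result with is_some() plus clone()/unwrap(); either spelling consumes the builder exactly once.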
```diff
@@ -188,19 +184,21 @@ pub async fn aggregate(
         drop(blacklist_map);
     }
 
-    let mut results: Vec<SearchResult> = result_map
-        .iter()
-        .map(|(_, value)| {
-            let mut copy = value.clone();
-            if !copy.url.contains("temu.com") {
-                copy.calculate_relevance(query.as_str())
+    let mut results: Box<[SearchResult]> = result_map
+        .into_iter()
+        .map(|(_, mut value)| {
+            if !value.url.contains("temu.com") {
+                value.calculate_relevance(query.as_str())
             }
-            copy
+            value
         })
         .collect();
     sort_search_results(&mut results);
 
-    Ok(SearchResults::new(results, &engine_errors_info))
+    Ok(SearchResults::new(
+        results,
+        engine_errors_info.into_boxed_slice(),
+    ))
 }
 
 /// Filters a map of search results using a list of regex patterns.
@@ -247,6 +245,7 @@ pub async fn filter_with_lists(
 
     Ok(())
 }
+
 /// Sorts SearchResults by relevance score.
 /// <br> sort_unstable is used as its faster,stability is not an issue on our side.
 /// For reasons why, check out [`this`](https://rust-lang.github.io/rfcs/1884-unstable-sort.html)
@@ -262,10 +261,10 @@ fn sort_search_results(results: &mut [SearchResult]) {
             .unwrap_or(Ordering::Less)
     })
 }
 #[cfg(test)]
 mod tests {
     use super::*;
-    use smallvec::smallvec;
     use std::io::Write;
     use tempfile::NamedTempFile;
 
@@ -281,7 +280,7 @@ mod tests {
             description: "This domain is for use in illustrative examples in documents."
                 .to_owned(),
             relevance_score: 0.0,
-            engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
+            engine: vec!["Google".to_owned(), "Bing".to_owned()],
         },
     ));
     map_to_be_filtered.push((
@@ -290,7 +289,7 @@ mod tests {
             title: "Rust Programming Language".to_owned(),
             url: "https://www.rust-lang.org/".to_owned(),
             description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
-            engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
+            engine: vec!["Google".to_owned(), "DuckDuckGo".to_owned()],
             relevance_score:0.0
         },)
     );
@@ -331,7 +330,7 @@ mod tests {
             url: "https://www.example.com".to_owned(),
             description: "This domain is for use in illustrative examples in documents."
                 .to_owned(),
-            engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
+            engine: vec!["Google".to_owned(), "Bing".to_owned()],
             relevance_score: 0.0,
         },
     ));
@@ -341,7 +340,7 @@ mod tests {
             title: "Rust Programming Language".to_owned(),
             url: "https://www.rust-lang.org/".to_owned(),
             description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(),
-            engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()],
+            engine: vec!["Google".to_owned(), "DuckDuckGo".to_owned()],
             relevance_score:0.0
         },
     ));
@@ -398,7 +397,7 @@ mod tests {
             url: "https://www.example.com".to_owned(),
             description: "This domain is for use in illustrative examples in documents."
                 .to_owned(),
-            engine: smallvec!["Google".to_owned(), "Bing".to_owned()],
+            engine: vec!["Google".to_owned(), "Bing".to_owned()],
             relevance_score: 0.0,
         },
     ));
```
src/server/routes/search.rs (filename inferred from hunk contents):

```diff
@@ -14,7 +14,8 @@ use crate::{
 use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
 use itertools::Itertools;
 use regex::Regex;
-use std::borrow::Cow;
+use std::time::{SystemTime, UNIX_EPOCH};
+use std::{borrow::Cow, time::Duration};
 use tokio::{
     fs::File,
     io::{AsyncBufReadExt, BufReader},
@@ -83,6 +84,13 @@ pub async fn search(
     let previous_page = page.saturating_sub(1);
     let next_page = page + 1;
 
+    // Add a random delay before making the request.
+    if config.aggregator.random_delay || !config.debug {
+        let nanos = SystemTime::now().duration_since(UNIX_EPOCH)?.subsec_nanos() as f32;
+        let delay = ((nanos / 1_0000_0000 as f32).floor() as u64) + 1;
+        tokio::time::sleep(Duration::from_secs(delay)).await;
+    }
+
     let results: (SearchResults, String, bool);
     if page != previous_page {
         let (previous_results, current_results, next_results) = join!(
```
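A worked, standalone sketch of the delay arithmetic in the block just added: subsec_nanos() is uniform over [0, 1e9), underscores in numeric literals are ignored so 1_0000_0000 is 1e8, and the floor-plus-one maps the nanosecond field to a whole-second delay of roughly one to ten seconds.

```rust
use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock is set after the Unix epoch")
        .subsec_nanos() as f32;
    let delay = ((nanos / 1_0000_0000 as f32).floor() as u64) + 1;
    // Nominally 1..=10; f32 rounding of the nanosecond field can, very
    // rarely, nudge the top end to 11.
    assert!((1..=11).contains(&delay));
    println!("would sleep for {delay} seconds");
}
```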
```diff
@@ -96,9 +104,7 @@ pub async fn search(
         let (results_list, cache_keys): (Vec<SearchResults>, Vec<String>) =
             [previous_results?, results.clone(), next_results?]
                 .into_iter()
-                .filter_map(|(result, cache_key, flag)| {
-                    dbg!(flag).then_some((result, cache_key))
-                })
+                .filter_map(|(result, cache_key, flag)| flag.then_some((result, cache_key)))
                 .multiunzip();
 
         tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
```
websurfx/config.lua (filename inferred from hunk contents):

```diff
@@ -73,3 +73,5 @@ upstream_search_engines = {
     Mojeek = false,
     Bing = false,
 } -- select the upstream search engines from which the results should be fetched.
+
+proxy = "" -- Proxy to send outgoing requests through. Set to empty string to disable.
```
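Note how this default interacts with the parser change earlier in the diff: an empty proxy string presumably fails reqwest's Proxy::all parse, which the config loader catches and maps to None, so the documented "set to empty string to disable" behaviour falls out of the error path, along with a logged "Invalid proxy url" message at startup.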