Merge branch 'rolling' into 532
This commit is contained in: commit c6b93403b8
Cargo.lock (generated): 47 lines changed
@@ -351,7 +351,7 @@ version = "0.4.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa"
 dependencies = [
- "brotli 5.0.0",
+ "brotli",
  "flate2",
  "futures-core",
  "memchr",
@@ -490,28 +490,7 @@ checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b"
 dependencies = [
  "alloc-no-stdlib",
  "alloc-stdlib",
- "brotli-decompressor 2.5.1",
-]
-
-[[package]]
-name = "brotli"
-version = "5.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19483b140a7ac7174d34b5a581b406c64f84da5409d3e09cf4fff604f9270e67"
-dependencies = [
- "alloc-no-stdlib",
- "alloc-stdlib",
- "brotli-decompressor 4.0.0",
-]
-
-[[package]]
-name = "brotli-decompressor"
-version = "4.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362"
-dependencies = [
- "alloc-no-stdlib",
- "alloc-stdlib",
+ "brotli-decompressor",
 ]
 
 [[package]]
@@ -872,7 +851,7 @@ dependencies = [
  "clap",
  "criterion-plot",
  "is-terminal",
- "itertools",
+ "itertools 0.10.5",
  "num-traits",
  "once_cell",
  "oorandom",
@@ -891,7 +870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
 dependencies = [
  "cast",
- "itertools",
+ "itertools 0.10.5",
 ]
 
 [[package]]
@@ -1902,6 +1881,15 @@ dependencies = [
  "either",
 ]
 
+[[package]]
+name = "itertools"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "itoa"
 version = "0.4.8"
@@ -2004,7 +1992,7 @@ dependencies = [
  "cssparser-color",
  "data-encoding",
  "getrandom",
- "itertools",
+ "itertools 0.10.5",
  "lazy_static",
  "parcel_selectors",
  "paste",
@@ -2849,9 +2837,9 @@ dependencies = [
 
 [[package]]
 name = "quinn-proto"
-version = "0.11.6"
+version = "0.11.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba92fb39ec7ad06ca2582c0ca834dfeadcaf06ddfc8e635c80aa7e1c05315fdd"
+checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
 dependencies = [
  "bytes 1.6.0",
  "rand 0.8.5",
@@ -4461,7 +4449,7 @@ dependencies = [
 
 [[package]]
 name = "websurfx"
-version = "1.17.0"
+version = "1.17.20"
 dependencies = [
  "actix-cors",
  "actix-files",
@@ -4481,6 +4469,7 @@ dependencies = [
  "error-stack",
  "fake-useragent",
  "futures 0.3.30",
+ "itertools 0.13.0",
  "keyword_extraction",
  "lightningcss",
  "log",
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.17.0"
+version = "1.17.20"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -82,14 +82,13 @@ base64 = { version = "0.21.5", default-features = false, features = [
 cfg-if = { version = "1.0.0", default-features = false, optional = true }
 keyword_extraction = { version = "1.4.3", default-features = false, features = [
     "tf_idf",
-
-
 ] }
 
 stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
 thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
     "moby",
 ]}
+itertools = {version = "0.13.0", default-features = false}
 
 [dev-dependencies]
 rusty-hook = { version = "^0.11.2", default-features = false }
src/cache/redis_cacher.rs: 15 lines changed
@@ -4,7 +4,10 @@
 use super::error::CacheError;
 use error_stack::Report;
 use futures::stream::FuturesUnordered;
-use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};
+use redis::{
+    aio::ConnectionManager, AsyncCommands, Client, ExistenceCheck, RedisError, SetExpiry,
+    SetOptions,
+};
 
 /// A constant holding the redis pipeline size.
 const REDIS_PIPELINE_SIZE: usize = 3;
@@ -139,8 +142,14 @@ impl RedisCache {
         self.current_connection = Default::default();
 
         for (key, json_result) in keys.zip(json_results) {
-            self.pipeline
-                .set_ex(key, json_result, self.cache_ttl.into());
+            self.pipeline.set_options(
+                key,
+                json_result,
+                SetOptions::default()
+                    .conditional_set(ExistenceCheck::NX)
+                    .get(true)
+                    .with_expiration(SetExpiry::EX(self.cache_ttl.into())),
+            );
         }
 
         let mut result: Result<(), RedisError> = self
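Note: the replaced SETEX write becomes a single SET with NX (write only when the key is absent), GET (return any previous value), and EX (attach the TTL); Redis 7.0 or newer is required to combine NX with GET. A minimal standalone sketch of the same option builder, assuming the redis crate 0.25 or later and a server on localhost (key and value names are made up for illustration):

use redis::{Commands, ExistenceCheck, SetExpiry, SetOptions};

fn main() -> redis::RedisResult<()> {
    let client = redis::Client::open("redis://127.0.0.1/")?;
    let mut con = client.get_connection()?;

    // SET results:example json-blob NX GET EX 60, all in one round trip.
    let opts = SetOptions::default()
        .conditional_set(ExistenceCheck::NX) // only write if the key is absent
        .get(true)                           // return the previous value, if any
        .with_expiration(SetExpiry::EX(60)); // expire after 60 seconds

    let previous: Option<String> = con.set_options("results:example", "json-blob", opts)?;
    println!("previous value: {previous:?}"); // None on the first write
    Ok(())
}

With NX in place, re-writing a key that is already cached leaves it untouched, which makes the batched cache writes in search() below idempotent.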
src/server/routes/search.rs

@@ -12,6 +12,7 @@ use crate::{
     results::aggregator::aggregate,
 };
 use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
+use itertools::Itertools;
 use regex::Regex;
 use std::borrow::Cow;
 use tokio::{
@@ -40,7 +41,6 @@ pub async fn search(
     config: web::Data<&'static Config>,
     cache: web::Data<&'static SharedCache>,
 ) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    use std::sync::Arc;
     let params = web::Query::<SearchParams>::from_query(req.query_string())?;
     match &params.q {
         Some(query) => {
@@ -83,44 +83,36 @@ pub async fn search(
             let previous_page = page.saturating_sub(1);
             let next_page = page + 1;
 
-            let mut results = Arc::new((SearchResults::default(), String::default()));
+            let results: (SearchResults, String, bool);
             if page != previous_page {
                 let (previous_results, current_results, next_results) = join!(
                     get_results(previous_page),
                     get_results(page),
                     get_results(next_page)
                 );
-                let (parsed_previous_results, parsed_next_results) =
-                    (previous_results?, next_results?);
 
-                let (cache_keys, results_list) = (
-                    [
-                        parsed_previous_results.1,
-                        results.1.clone(),
-                        parsed_next_results.1,
-                    ],
-                    [
-                        parsed_previous_results.0,
-                        results.0.clone(),
-                        parsed_next_results.0,
-                    ],
-                );
-
-                results = Arc::new(current_results?);
+                results = current_results?;
 
+                let (results_list, cache_keys): (Vec<SearchResults>, Vec<String>) =
+                    [previous_results?, results.clone(), next_results?]
+                        .into_iter()
+                        .filter_map(|(result, cache_key, flag)| {
+                            dbg!(flag).then_some((result, cache_key))
+                        })
+                        .multiunzip();
+
                 tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
             } else {
                 let (current_results, next_results) =
                     join!(get_results(page), get_results(page + 1));
 
-                let parsed_next_results = next_results?;
-
-                results = Arc::new(current_results?);
+                results = current_results?;
 
-                let (cache_keys, results_list) = (
-                    [results.1.clone(), parsed_next_results.1.clone()],
-                    [results.0.clone(), parsed_next_results.0],
-                );
+                let (results_list, cache_keys): (Vec<SearchResults>, Vec<String>) =
+                    [results.clone(), next_results?]
+                        .into_iter()
+                        .filter_map(|(result, cache_key, flag)| flag.then_some((result, cache_key)))
+                        .multiunzip();
 
                 tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
             }
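Note: the rewritten branches lean on Itertools::multiunzip to split the kept (result, cache_key) pairs into two parallel Vecs; the dbg!(flag) in the first branch looks like leftover debug output, since the second branch tests the flag directly. A self-contained sketch of the same filter-and-split pattern, with toy strings standing in for the real SearchResults tuples:

use itertools::Itertools;

fn main() {
    // (result, cache_key, freshly_computed) triples, as results() now returns.
    let triples = [
        ("res-a".to_string(), "key-a".to_string(), true),
        ("res-b".to_string(), "key-b".to_string(), false), // cache hit, skip
        ("res-c".to_string(), "key-c".to_string(), true),
    ];

    // Keep only the freshly computed entries and split them into parallel Vecs.
    let (results_list, cache_keys): (Vec<String>, Vec<String>) = triples
        .into_iter()
        .filter_map(|(result, key, fresh)| fresh.then_some((result, key)))
        .multiunzip();

    assert_eq!(results_list, ["res-a", "res-c"]);
    assert_eq!(cache_keys, ["key-a", "key-c"]);
}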
@@ -163,7 +155,7 @@ async fn results(
     query: &str,
     page: u32,
     search_settings: &server_models::Cookie<'_>,
-) -> Result<(SearchResults, String), Box<dyn std::error::Error>> {
+) -> Result<(SearchResults, String, bool), Box<dyn std::error::Error>> {
     // eagerly parse cookie value to evaluate safe search level
     let safe_search_level = search_settings.safe_search_level;
 
@@ -182,7 +174,7 @@ async fn results(
     // check if fetched cache results was indeed fetched or it was an error and if so
     // handle the data accordingly.
     match cached_results {
-        Ok(results) => Ok((results, cache_key)),
+        Ok(results) => Ok((results, cache_key, false)),
         Err(_) => {
             if safe_search_level == 4 {
                 let mut results: SearchResults = SearchResults::default();
@@ -196,7 +188,7 @@ async fn results(
                     .cache_results(&[results.clone()], &[cache_key.clone()])
                     .await?;
                 results.set_safe_search_level(safe_search_level);
-                return Ok((results, cache_key));
+                return Ok((results, cache_key, true));
             }
         }
 
@@ -235,7 +227,7 @@ async fn results(
                 .cache_results(&[results.clone()], &[cache_key.clone()])
                 .await?;
             results.set_safe_search_level(safe_search_level);
-            Ok((results, cache_key))
+            Ok((results, cache_key, true))
         }
     }
 }
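Note: the third tuple element threaded through these hunks marks whether a result set was freshly aggregated (true, worth writing back to the cache) or served from the cache (false). A toy model of that contract; CacheLookup and the key string below are invented stand-ins, not websurfx types:

#[derive(Clone, Default, Debug)]
struct SearchResults;

enum CacheLookup {
    Hit(SearchResults),
    Miss,
}

// Mirrors the reshaped return value of results(): cache hits come back with
// false (do not re-cache), misses produce fresh results flagged true.
fn lookup(cache: CacheLookup, cache_key: String) -> (SearchResults, String, bool) {
    match cache {
        CacheLookup::Hit(results) => (results, cache_key, false),
        CacheLookup::Miss => (SearchResults::default(), cache_key, true),
    }
}

fn main() {
    let (_results, key, fresh) = lookup(CacheLookup::Miss, "search:q=rust:p=1".into());
    assert!(fresh); // a miss yields fresh results for the caller to cache
    println!("{key} fresh={fresh}");
}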