Mirror of https://github.com/neon-mmd/websurfx.git (synced 2025-01-01 09:18:20 -05:00)

Merge branch 'rolling' into qwant

Commit b3dfb7f7c1 by neon_arch, 2024-12-05 23:25:11 +03:00, committed by GitHub.
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
17 changed files with 351 additions and 409 deletions

View File

@ -1,13 +1,14 @@
pull_request_rules:
- name: Automatic merge on approval
conditions:
queue_rules:
- name: default
queue_conditions:
- "#approved-reviews-by>=2"
- check-success=build (stable)
- check-success=CodeFactor
- check-success=Rust project
actions:
queue:
method: squash
merge_conditions: []
merge_method: squash
pull_request_rules:
- name: automatic update of pull requests where more 5 commits behind
conditions:
- "#commits-behind>5"
@ -17,4 +18,8 @@ pull_request_rules:
conditions:
- merged
actions:
delete_head_branch: {}
delete_head_branch: {}
- name: Automatic merge on approval
conditions: []
actions:
queue:

Cargo.lock (generated, 547 changed lines)

File diff suppressed because it is too large.

View File

@ -1,6 +1,6 @@
[package]
name = "websurfx"
version = "1.17.22"
version = "1.18.0"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"
@ -19,20 +19,21 @@ reqwest = { version = "0.12.5", default-features = false, features = [
"gzip",
"http2",
"json"
"socks",
] }
tokio = { version = "1.32.0", features = [
tokio = { version = "1.41.0", features = [
"rt-multi-thread",
"macros",
"fs",
"io-util",
], default-features = false }
serde = { version = "1.0.209", default-features = false, features = ["derive"] }
serde = { version = "1.0.215", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.122", default-features = false }
bincode = {version="1.3.3", default-features=false}
maud = { version = "0.26.0", default-features = false, features = [
"actix-web",
] }
scraper = { version = "0.20.0", default-features = false }
scraper = { version = "0.21.0", default-features = false }
actix-web = { version = "4.9.0", features = [
"cookies",
"macros",
@ -47,22 +48,22 @@ mlua = { version = "0.9.9", features = [
"luajit",
"vendored",
], default-features = false }
redis = { version = "0.27.2", features = [
redis = { version = "0.27.5", features = [
"tokio-comp",
"connection-manager",
"tcp_nodelay"
], default-features = false, optional = true }
blake3 = { version = "1.5.4", default-features = false }
blake3 = { version = "1.5.5", default-features = false }
error-stack = { version = "0.5.0", default-features = false, features = [
"std",
] }
async-trait = { version = "0.1.80", default-features = false }
regex = { version = "1.11.0", features = ["perf"], default-features = false }
futures = { version = "0.3.30", default-features = false, features = ["alloc"] }
regex = { version = "1.11.1", features = ["perf"], default-features = false }
futures = { version = "0.3.31", default-features = false, features = ["alloc"] }
dhat = { version = "0.3.2", optional = true, default-features = false }
mimalloc = { version = "0.1.43", default-features = false }
async-once-cell = { version = "0.5.3", default-features = false }
actix-governor = { version = "0.6.0", default-features = false }
actix-governor = { version = "0.7.0", default-features = false }
moka = { version = "0.12.8", optional = true, default-features = false, features = [
"future",
] }
@ -79,7 +80,7 @@ base64 = { version = "0.21.5", default-features = false, features = [
"std",
], optional = true }
cfg-if = { version = "1.0.0", default-features = false, optional = true }
keyword_extraction = { version = "1.4.3", default-features = false, features = [
keyword_extraction = { version = "1.5.0", default-features = false, features = [
"tf_idf",
"rayon",
] }
@ -92,7 +93,7 @@ itertools = {version = "0.13.0", default-features = false}
[dev-dependencies]
rusty-hook = { version = "^0.11.2", default-features = false }
criterion = { version = "0.5.1", default-features = false }
tempfile = { version = "3.13.0", default-features = false }
tempfile = { version = "3.14.0", default-features = false }
[build-dependencies]
lightningcss = { version = "1.0.0-alpha.57", default-features = false, features = [

public/images/close.svg (new file, 1 changed line)
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-x"><line x1="18" y1="6" x2="6" y2="18"></line><line x1="6" y1="6" x2="18" y2="18"></line></svg>


View File

@ -1,34 +1,6 @@
/**
* Selects the input element for the search box
* @type {HTMLInputElement}
*/
const searchBox = document.querySelector('input')
/**
* Redirects the user to the search results page with the query parameter
*/
function searchWeb() {
const query = searchBox.value.trim()
try {
let safeSearchLevel = document.querySelector('.search_options select').value
if (query) {
window.location.href = `search?q=${encodeURIComponent(
query,
)}&safesearch=${encodeURIComponent(safeSearchLevel)}`
}
} catch (error) {
if (query) {
window.location.href = `search?q=${encodeURIComponent(query)}`
}
}
}
/**
* A function that clears the search input text when the clear button is clicked.
*/
function clearSearchText() {
document.querySelector('.search_bar > input').value = ''
}
/**
* Listens for the 'Enter' key press event on the search box and calls the searchWeb function
* @param {KeyboardEvent} e - The keyboard event object
*/
searchBox.addEventListener('keyup', (e) => {
if (e.key === 'Enter') {
searchWeb()
}
})

View File

@ -1,39 +0,0 @@
/**
* Navigates to the next page by incrementing the current page number in the URL query string.
* @returns {void}
*/
function navigate_forward() {
let url = new URL(window.location);
let searchParams = url.searchParams;
let q = searchParams.get('q');
let page = parseInt(searchParams.get('page'));
if (isNaN(page)) {
page = 1;
} else {
page++;
}
window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
}
/**
* Navigates to the previous page by decrementing the current page number in the URL query string.
* @returns {void}
*/
function navigate_backward() {
let url = new URL(window.location);
let searchParams = url.searchParams;
let q = searchParams.get('q');
let page = parseInt(searchParams.get('page'));
if (isNaN(page)) {
page = 0;
} else if (page > 0) {
page--;
}
window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`;
}

View File

@ -1,18 +0,0 @@
document.addEventListener(
'DOMContentLoaded',
() => {
let url = new URL(window.location)
let searchParams = url.searchParams
let safeSearchLevel = searchParams.get('safesearch')
if (
safeSearchLevel >= 0 &&
safeSearchLevel <= 2 &&
safeSearchLevel !== null
) {
document.querySelector('.search_options select').value = safeSearchLevel
}
},
false,
)

View File

@ -73,6 +73,11 @@ button {
font-size: 1.6rem;
}
.search_bar input::-webkit-search-results-button,
.search_bar input::-webkit-search-cancel-button{
display: none;
}
.search_bar input:focus {
outline: 2px solid var(--foreground-color);
}
@ -443,7 +448,7 @@ footer div {
align-items: center;
}
.page_navigation button {
.page_navigation a {
background: var(--background-color);
color: var(--foreground-color);
padding: 1rem;
@ -452,7 +457,7 @@ footer div {
border: none;
}
.page_navigation button:active {
.page_navigation a:active {
filter: brightness(1.2);
}

View File

@ -6,6 +6,7 @@ use crate::handler::{file_path, FileType};
use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
use log::LevelFilter;
use mlua::Lua;
use reqwest::Proxy;
use std::{collections::HashMap, fs, thread::available_parallelism};
/// A named struct which stores the parsed config file options.
@ -48,8 +49,12 @@ pub struct Config {
pub tcp_connection_keep_alive: u8,
/// It stores the pool idle connection timeout in seconds.
pub pool_idle_connection_timeout: u8,
/// Url of the proxy to use for outgoing requests.
pub proxy: Option<Proxy>,
/// It stores the number of https connections to keep in the pool.
pub number_of_https_connections: u8,
/// It stores the operating system's TLS certificates for https requests.
pub operating_system_tls_certificates: bool,
}
impl Config {
@ -120,7 +125,17 @@ impl Config {
_ => parsed_cet,
};
let proxy_opt = globals.get::<_, Option<String>>("proxy")?;
let proxy = proxy_opt.and_then(|proxy_str| {
Proxy::all(proxy_str).ok().and_then(|_| {
log::error!("Invalid proxy url, defaulting to no proxy.");
None
})
});
Ok(Config {
operating_system_tls_certificates: globals
.get::<_, bool>("operating_system_tls_certificates")?,
port: globals.get::<_, u16>("port")?,
binding_ip: globals.get::<_, String>("binding_ip")?,
style: Style::new(
@ -151,6 +166,7 @@ impl Config {
safe_search,
#[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
cache_expiry_time,
proxy,
})
}
}
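
As shown in the hunk above, the new proxy handling calls `Proxy::all(proxy_str).ok()` and then maps a successful parse to `None` while logging "Invalid proxy url", so a valid proxy URL is discarded and an invalid one is dropped without a log. A minimal sketch of the presumably intended behaviour, using a hypothetical `parse_proxy` helper that is not part of this commit:

```rust
use reqwest::Proxy;

/// Hypothetical helper: keep the proxy when the URL parses,
/// otherwise log the error and fall back to no proxy.
fn parse_proxy(proxy_opt: Option<String>) -> Option<Proxy> {
    proxy_opt.and_then(|proxy_str| match Proxy::all(proxy_str) {
        Ok(proxy) => Some(proxy),
        Err(err) => {
            log::error!("Invalid proxy url ({}), defaulting to no proxy.", err);
            None
        }
    })
}
```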

View File

@ -30,7 +30,7 @@ impl LibreX {
Ok(Self {
parser: SearchResultParser::new(
".text-result-container>p",
".text-result-container",
".text-result-wrapper",
".text-result-wrapper>a>h2",
".text-result-wrapper>a",
".text-result-wrapper>span",

View File

@ -91,7 +91,7 @@ pub fn run(
.wrap(cors)
.wrap(Governor::new(
&GovernorConfigBuilder::default()
.per_second(config.rate_limiter.time_limit as u64)
.seconds_per_request(config.rate_limiter.time_limit as u64)
.burst_size(config.rate_limiter.number_of_requests as u32)
.finish()
.unwrap(),

View File

@ -75,7 +75,7 @@ pub async fn aggregate(
safe_search: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let client = CLIENT.get_or_init(|| {
ClientBuilder::new()
let mut cb = ClientBuilder::new()
.timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.pool_idle_timeout(Duration::from_secs(
config.pool_idle_connection_timeout as u64,
@ -83,12 +83,18 @@ pub async fn aggregate(
.tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
.pool_max_idle_per_host(config.number_of_https_connections as usize)
.connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.use_rustls_tls()
.tls_built_in_root_certs(config.operating_system_tls_certificates)
.https_only(true)
.gzip(true)
.brotli(true)
.http2_adaptive_window(config.adaptive_window)
.build()
.unwrap()
.http2_adaptive_window(config.adaptive_window);
if config.proxy.is_some() {
cb = cb.proxy(config.proxy.clone().unwrap());
}
cb.build().unwrap()
});
let user_agent: &str = random_user_agent();
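
The builder change above attaches the proxy with an `is_some()` check followed by `clone().unwrap()`. A minimal sketch of the same pattern written with `if let`, assuming a hypothetical `build_client` helper and only a subset of the options used in the hunk:

```rust
use std::time::Duration;
use reqwest::{Client, ClientBuilder, Proxy};

/// Sketch (not the project's exact builder): thread an optional proxy
/// into a reqwest ClientBuilder without `is_some()`/`unwrap()`.
fn build_client(proxy: Option<Proxy>, timeout_secs: u64) -> reqwest::Result<Client> {
    let mut builder = ClientBuilder::new()
        .timeout(Duration::from_secs(timeout_secs))
        .https_only(true);
    if let Some(proxy) = proxy {
        builder = builder.proxy(proxy);
    }
    builder.build()
}
```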
@ -242,6 +248,7 @@ pub async fn filter_with_lists(
Ok(())
}
/// Sorts SearchResults by relevance score.
/// <br> sort_unstable is used as it's faster; stability is not an issue on our side.
/// For reasons why, check out [`this`](https://rust-lang.github.io/rfcs/1884-unstable-sort.html)
@ -257,6 +264,7 @@ fn sort_search_results(results: &mut [SearchResult]) {
.unwrap_or(Ordering::Less)
})
}
#[cfg(test)]
mod tests {
use super::*;

View File

@ -85,7 +85,7 @@ pub async fn search(
let next_page = page + 1;
// Add a random delay before making the request.
if config.aggregator.random_delay || !config.debug {
if config.aggregator.random_delay || config.debug {
let nanos = SystemTime::now().duration_since(UNIX_EPOCH)?.subsec_nanos() as f32;
let delay = ((nanos / 1_0000_0000 as f32).floor() as u64) + 1;
tokio::time::sleep(Duration::from_secs(delay)).await;
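
For the delay arithmetic in this hunk: `subsec_nanos()` is always in `[0, 1_000_000_000)`, so dividing by the `1_0000_0000` literal (1e8) and flooring yields a value from 0 to 9, and the `+ 1` turns it into a 1 to 10 second sleep. A small sketch that isolates the calculation in a hypothetical `random_delay_secs` helper:

```rust
use std::time::{SystemTime, UNIX_EPOCH};

/// Hypothetical helper isolating the delay calculation used above:
/// nanos / 1e8 floors to 0..=9, and the +1 yields a 1..=10 second delay.
fn random_delay_secs() -> u64 {
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock is before the UNIX epoch")
        .subsec_nanos() as f32;
    ((nanos / 100_000_000.0).floor() as u64) + 1
}
```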
@ -129,6 +129,7 @@ pub async fn search(
&config.style.theme,
&config.style.animation,
query,
page,
&results.0,
)
.0,

View File

@ -14,9 +14,13 @@ use maud::{html, Markup, PreEscaped};
/// It returns the compiled html code for the search bar as a result.
pub fn bar(query: &str) -> Markup {
html!(
(PreEscaped("<form action=\"/search\">"))
(PreEscaped("<div class=\"search_bar\">"))
input type="search" name="search-box" value=(query) placeholder="Type to search";
button type="submit" onclick="searchWeb()" {
input type="search" name="q" value=(query) placeholder="Type to search";
button type="button" onclick="clearSearchText()" {
img src="./images/close.svg" alt="Clear button icon for clearing search input text";
}
button type="submit" {
img src="./images/magnifying_glass.svg" alt="Info icon for error box";
}
)

View File

@ -29,7 +29,7 @@ pub fn search_bar(
(bar(query))
.error_box {
@if !engine_errors_info.is_empty(){
button onclick="toggleErrorBox()" class="error_box_toggle_button"{
button type="button" onclick="toggleErrorBox()" class="error_box_toggle_button"{
img src="./images/warning.svg" alt="Info icon for error box";
}
.dropdown_error_box{
@ -43,7 +43,7 @@ pub fn search_bar(
}
}
@else {
button onclick="toggleErrorBox()" class="error_box_toggle_button"{
button type="button" onclick="toggleErrorBox()" class="error_box_toggle_button"{
img src="./images/info.svg" alt="Warning icon for error box";
}
.dropdown_error_box {
@ -56,10 +56,10 @@ pub fn search_bar(
(PreEscaped("</div>"))
.search_options {
@if safe_search_level >= 3 {
(PreEscaped("<select name=\"safe_search_levels\" disabled>"))
(PreEscaped("<select name=\"safesearch\" disabled>"))
}
@else{
(PreEscaped("<select name=\"safe_search_levels\">"))
(PreEscaped(format!("<select name=\"safesearch\" value=\"{}\">", safe_search_level)))
}
@for (idx, name) in SAFE_SEARCH_LEVELS_NAME.iter().enumerate() {
@if (safe_search_level as usize) == idx {
@ -71,6 +71,7 @@ pub fn search_bar(
}
(PreEscaped("</select>"))
}
(PreEscaped("</form>"))
}
)
}

View File

@ -24,6 +24,7 @@ pub fn search(
theme: &str,
animation: &Option<String>,
query: &str,
page: u32,
search_results: &SearchResults,
) -> Markup {
html!(
@ -108,15 +109,14 @@ pub fn search(
}
}
.page_navigation {
button type="button" onclick="navigate_backward()"{
a href=(format!("/search?q={}&safesearch={}&page={}", query, search_results.safe_search_level, if page > 1 {page-1} else {1})) {
(PreEscaped("&#8592;")) "previous"
}
button type="button" onclick="navigate_forward()"{"next" (PreEscaped("&#8594;"))}
a href=(format!("/search?q={}&safesearch={}&page={}", query, search_results.safe_search_level, page+2)) {
"next" (PreEscaped("&#8594;"))}
}
}
script src="static/index.js"{}
script src="static/search_area_options.js"{}
script src="static/pagination.js"{}
script src="static/error_box.js"{}
(footer())
)

View File

@ -19,6 +19,8 @@ rate_limiter = {
-- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
https_adaptive_window_size = false
operating_system_tls_certificates = true -- Set whether the server will use operating system's tls certificates alongside rustls certificates while fetching search results from the upstream engines.
number_of_https_connections = 10 -- the number of https connections that should be available in the connection pool.
-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
client_connection_keep_alive = 120
@ -76,3 +78,5 @@ upstream_search_engines = {
Bing = false,
Qwant = false,
} -- select the upstream search engines from which the results should be fetched.
proxy = nil -- Proxy to send outgoing requests through. Set to nil to disable.