0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-25 07:28:22 -05:00

Compare commits

..

1 Commits

Author SHA1 Message Date
Kekma
22b59e2f3e
Merge ba1b68801e into 408858a91e 2024-05-04 16:58:22 +00:00
9 changed files with 501 additions and 760 deletions

1164
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
[package]
name = "websurfx"
version = "1.17.0"
version = "1.12.1"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"
@ -13,11 +13,10 @@ bench = false
path = "src/bin/websurfx.rs"
[dependencies]
reqwest = { version = "0.12.5", default-features = false, features = [
reqwest = { version = "0.11.24", default-features = false, features = [
"rustls-tls",
"brotli",
"gzip",
"http2"
] }
tokio = { version = "1.32.0", features = [
"rt-multi-thread",
@ -25,13 +24,13 @@ tokio = { version = "1.32.0", features = [
"fs",
"io-util",
], default-features = false }
serde = { version = "1.0.209", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.122", default-features = false }
serde = { version = "1.0.196", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.116", default-features = false }
maud = { version = "0.26.0", default-features = false, features = [
"actix-web",
] }
scraper = { version = "0.20.0", default-features = false }
actix-web = { version = "4.9.0", features = [
scraper = { version = "0.18.1", default-features = false }
actix-web = { version = "4.4.0", features = [
"cookies",
"macros",
"compress-brotli",
@ -41,15 +40,15 @@ actix-cors = { version = "0.7.0", default-features = false }
fake-useragent = { version = "0.1.3", default-features = false }
env_logger = { version = "0.11.1", default-features = false }
log = { version = "0.4.21", default-features = false }
mlua = { version = "0.9.9", features = [
mlua = { version = "0.9.1", features = [
"luajit",
"vendored",
], default-features = false }
redis = { version = "0.25.4", features = [
redis = { version = "0.24.0", features = [
"tokio-comp",
"connection-manager",
], default-features = false, optional = true }
blake3 = { version = "1.5.4", default-features = false }
blake3 = { version = "1.5.0", default-features = false }
error-stack = { version = "0.4.0", default-features = false, features = [
"std",
] }
@ -61,13 +60,13 @@ smallvec = { version = "1.13.1", features = [
], default-features = false }
futures = { version = "0.3.30", default-features = false, features = ["alloc"] }
dhat = { version = "0.3.2", optional = true, default-features = false }
mimalloc = { version = "0.1.43", default-features = false }
mimalloc = { version = "0.1.38", default-features = false }
async-once-cell = { version = "0.5.3", default-features = false }
actix-governor = { version = "0.5.0", default-features = false }
mini-moka = { version = "0.10", optional = true, default-features = false, features = [
"sync",
] }
async-compression = { version = "0.4.12", default-features = false, features = [
async-compression = { version = "0.4.6", default-features = false, features = [
"brotli",
"tokio",
], optional = true }
@ -97,7 +96,7 @@ criterion = { version = "0.5.1", default-features = false }
tempfile = { version = "3.10.1", default-features = false }
[build-dependencies]
lightningcss = { version = "1.0.0-alpha.57", default-features = false, features = [
lightningcss = { version = "1.0.0-alpha.55", default-features = false, features = [
"grid",
] }
# Disabled until bug fixing update
@ -133,50 +132,6 @@ codegen-units = 1
rpath = false
strip = "symbols"
[profile.bsr1]
inherits = "release"
opt-level = "s"
[profile.bsr2]
inherits = "bsr1"
opt-level = "z"
[profile.lpcb1]
inherits = "release"
codegen-units = 16
[profile.lpcb2]
inherits = "lpcb1"
lto = "off"
[profile.lpcb3]
inherits = "lpcb2"
opt-level = 2
[profile.bsr_and_lpcb1]
inherits = "lpcb1"
opt-level = "s"
[profile.bsr_and_lpcb2]
inherits = "lpcb2"
opt-level = "s"
[profile.bsr_and_lpcb3]
inherits = "lpcb3"
opt-level = "s"
[profile.bsr_and_lpcb4]
inherits = "lpcb1"
opt-level = "z"
[profile.bsr_and_lpcb5]
inherits = "lpcb1"
opt-level = "z"
[profile.bsr_and_lpcb6]
inherits = "lpcb1"
opt-level = "z"
[features]
use-synonyms-search = ["thesaurus/static"]
default = ["memory-cache"]

View File

@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18 AS chef
FROM --platform=$BUILDPLATFORM rust:1.77.2-alpine3.18 AS chef
# We only pay the installation cost once,
# it will be cached from the second build onwards
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev upx perl build-base

View File

@ -115,9 +115,6 @@
- 🚀 Easy to setup with Docker or on bare metal with various installation and deployment options.
- ⛔ Search filtering to filter search results based on four different levels.
- 💾 Different caching levels focusing on reliability, speed and resiliency.
- ⬆️ Organic search results (with a built-in ranking algorithm to rerank the search results according to the user's search query).
- 🔒 Different compression and encryption levels focusing on speed and privacy.
- 🧪 Experimental IO-uring feature for Linux operating systems focused on performance of the engine.
- 🔐 Fast, private, and secure
- 🆓 100% free and open source
- 💨 Ad-free and clean results

View File

@ -1,5 +1,5 @@
# Create Builder image
FROM --platform=$BUILDPLATFORM rust:1.78.0-alpine3.18
FROM --platform=$BUILDPLATFORM rust:1.77.2-alpine3.18
# Install required dependencies
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base

View File

@ -37,15 +37,13 @@ pub struct Config {
pub request_timeout: u8,
/// It stores the number of threads which controls the app will use to run.
pub threads: u8,
/// Set the keep-alive time for client connections to the HTTP server
pub client_connection_keep_alive: u8,
/// It stores configuration options for the ratelimiting middleware.
pub rate_limiter: RateLimiter,
/// It stores the level of safe search to be used for restricting content in the
/// search results.
pub safe_search: u8,
/// It stores the TCP connection keepalive duration in seconds.
pub tcp_connection_keep_alive: u8,
pub tcp_connection_keepalive: u8,
/// It stores the pool idle connection timeout in seconds.
pub pool_idle_connection_timeout: u8,
}
@ -137,10 +135,9 @@ impl Config {
upstream_search_engines: globals
.get::<_, HashMap<String, bool>>("upstream_search_engines")?,
request_timeout: globals.get::<_, u8>("request_timeout")?,
tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
threads,
client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
rate_limiter: RateLimiter {
number_of_requests: rate_limiter["number_of_requests"],
time_limit: rate_limiter["time_limit"],

View File

@ -14,7 +14,7 @@ pub mod results;
pub mod server;
pub mod templates;
use std::{net::TcpListener, sync::OnceLock, time::Duration};
use std::{net::TcpListener, sync::OnceLock};
use crate::server::router;
@ -113,10 +113,6 @@ pub fn run(
.default_service(web::route().to(router::not_found)) // error page
})
.workers(config.threads as usize)
// Set the keep-alive timer for client connections
.keep_alive(Duration::from_secs(
config.client_connection_keep_alive as u64,
))
// Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
.listen(listener)?
.run();

View File

@ -81,7 +81,7 @@ pub async fn aggregate(
.pool_idle_timeout(Duration::from_secs(
config.pool_idle_connection_timeout as u64,
))
.tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
.tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
.connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.https_only(true)
.gzip(true)

View File

@ -10,18 +10,14 @@ production_use = false -- whether to use production mode or not (in other words
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
tcp_connection_keep_alive = 30 -- the amount of time the tcp connection should remain alive to the upstream search engines (or connected to the server). (value in seconds).
tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
pool_idle_connection_timeout = 30 -- timeout for the idle connections in the reqwest HTTP connection pool (value in seconds).
rate_limiter = {
number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
time_limit = 3, -- The time limit within which the provided number of requests should be accepted.
}
https_adaptive_window_size = false
number_of_https_connections = 10 -- the number of https connections that should be available in the connection pool.
-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
client_connection_keep_alive = 120
-- ### Search ###
-- Filter results based on different levels. The levels provided are:
-- {{