
Merge branch 'rolling' into patch-searx-results-on-first-page

This commit is contained in:
neon_arch 2023-07-31 13:57:34 +03:00 committed by GitHub
commit a8b6a9bce2
9 changed files with 26 additions and 7 deletions

View File

@ -29,6 +29,7 @@ Some of the configuration options provided in the file are stated below. These a
- **port:** Port number on which the server should be launched.
- **binding_ip_addr:** IP address on which the server should be launched.
- **production_use:** Whether to use production mode or not (in other words, this option should be used if the app is hosted on a server to provide a service to a large number of users). If production_use is set to true, there will be a random delay before sending the request to the search engines; this is to prevent DDoSing the upstream search engines with a large number of simultaneous requests. This is a newly added option and hence is only available in the **edge version**.
- **request_timeout:** Timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
## Website

View File

@ -86,6 +86,7 @@ binding_ip_addr = "0.0.0.0" -- IP address on which the server should be launched.
production_use = false -- whether to use production mode or not (in other words, this option should be set to true if the app is hosted on a server to provide a service to a large number of users).
-- If production_use is set to true,
-- there will be a random delay before sending the request to the search engines; this is to prevent DDoSing the upstream search engines with a large number of simultaneous requests.
request_timeout = 60 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-- ### Website ###
-- The different colorschemes provided are:

View File

@ -22,6 +22,7 @@ static CONFIG_FILE_NAME: &str = "config.lua";
/// * `logging` - It stores the option to enable or disable logs.
/// * `debug` - It stores the option to enable or disable debug mode.
/// * `upstream_search_engines` - It stores all the engine names that were enabled by the user.
/// * `request_timeout` - It stores the time (secs) which controls the server request timeout.
#[derive(Clone)]
pub struct Config {
pub port: u16,
@ -32,6 +33,7 @@ pub struct Config {
pub logging: bool,
pub debug: bool,
pub upstream_search_engines: Vec<String>,
pub request_timeout: u8,
}
/// Configuration options for the aggregator.
@ -80,6 +82,7 @@ impl Config {
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.collect(),
request_timeout: globals.get::<_, u8>("request_timeout")?,
})
})
}
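As wired above, a missing `request_timeout` key makes config loading fail with a hard error. A minimal sketch of a softer fallback (a hypothetical default of 30 seconds, which is not what this commit does), assuming an mlua-style Lua binding as the hunk suggests:

```rust
use mlua::Lua;

/// Sketch only: evaluate a Lua config string and read `request_timeout`,
/// falling back to a hypothetical default of 30 seconds when the key is
/// absent instead of propagating an error.
fn read_request_timeout(config: &str) -> mlua::Result<u8> {
    let lua = Lua::new();
    lua.load(config).exec()?; // run the config so its globals are set
    Ok(lua.globals().get::<_, u8>("request_timeout").unwrap_or(30))
}
```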

View File

@ -29,6 +29,7 @@ impl SearchEngine for DuckDuckGo {
/// * `query` - Takes the user-provided query to query the upstream search engine with.
/// * `page` - Takes a u32 as an argument.
/// * `user_agent` - Takes a random user agent string as an argument.
/// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout.
///
/// # Errors
///
@ -41,6 +42,7 @@ impl SearchEngine for DuckDuckGo {
query: String,
page: u32,
user_agent: String,
request_timeout: u8,
) -> Result<HashMap<String, RawSearchResult>, EngineError> {
// Page number can be missing or an empty string, so appropriate handling is required
// so that the upstream server receives a valid page number.
@ -90,7 +92,7 @@ impl SearchEngine for DuckDuckGo {
);
let document: Html = Html::parse_document(
&DuckDuckGo::fetch_html_from_upstream(self, url, header_map).await?,
&DuckDuckGo::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
);
let no_result: Selector = Selector::parse(".no-results")

View File

@ -50,11 +50,12 @@ pub trait SearchEngine {
&self,
url: String,
header_map: reqwest::header::HeaderMap,
request_timeout: u8,
) -> Result<String, EngineError> {
// fetch the html from upstream search engine
Ok(reqwest::Client::new()
.get(url)
.timeout(Duration::from_secs(30)) // Add timeout to request to avoid DDOSing the server
.timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.headers(header_map) // add spoofed headers to emulate human behaviour
.send()
.await
@ -71,5 +72,6 @@ pub trait SearchEngine {
query: String,
page: u32,
user_agent: String,
request_timeout: u8,
) -> Result<HashMap<String, RawSearchResult>, EngineError>;
}
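Because `fetch_html_from_upstream` builds a fresh `reqwest::Client` on every call, the timeout has to be threaded through each request. A hedged alternative sketch (not part of this commit) is to set the timeout once on a shared client at build time:

```rust
use std::time::Duration;

// Sketch only: build one reusable client whose default timeout is the
// configured `request_timeout` (the same u8 seconds value the trait
// methods receive), so individual calls no longer need `.timeout(...)`.
fn build_client(request_timeout: u8) -> reqwest::Result<reqwest::Client> {
    reqwest::Client::builder()
        .timeout(Duration::from_secs(request_timeout as u64))
        .build()
}
```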

View File

@ -27,6 +27,7 @@ impl SearchEngine for Searx {
/// * `query` - Takes the user-provided query to query the upstream search engine with.
/// * `page` - Takes a u32 as an argument.
/// * `user_agent` - Takes a random user agent string as an argument.
/// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout.
///
/// # Errors
///
@ -40,6 +41,7 @@ impl SearchEngine for Searx {
query: String,
page: u32,
user_agent: String,
request_timeout: u8,
) -> Result<HashMap<String, RawSearchResult>, EngineError> {
// Page number can be missing or an empty string, so appropriate handling is required
// so that the upstream server receives a valid page number.
@ -73,8 +75,9 @@ impl SearchEngine for Searx {
);
header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse().into_report().change_context(EngineError::UnexpectedError)?);
let document: Html =
Html::parse_document(&Searx::fetch_html_from_upstream(self, url, header_map).await?);
let document: Html = Html::parse_document(
&Searx::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
);
let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
.map_err(|_| Report::new(EngineError::UnexpectedError))

View File

@ -51,6 +51,7 @@ type FutureVec = Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<
/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
/// * `debug` - Accepts a boolean value to enable or disable debug mode.
/// * `upstream_search_engines` - Accepts a vector of search engine names which were selected by the
/// user through the UI or the config file.
/// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
///
/// # Error
@ -64,6 +65,7 @@ pub async fn aggregate(
random_delay: bool,
debug: bool,
upstream_search_engines: Vec<String>,
request_timeout: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let user_agent: String = random_user_agent();
let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
@ -92,9 +94,11 @@ pub async fn aggregate(
.map(|search_engine| {
let query: String = query.clone();
let user_agent: String = user_agent.clone();
tokio::spawn(
async move { search_engine.results(query, page, user_agent.clone()).await },
)
tokio::spawn(async move {
search_engine
.results(query, page, user_agent.clone(), request_timeout)
.await
})
})
.collect();
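The hunk ends with the spawned tasks collected into `FutureVec`; downstream (not shown in this diff) they are awaited and merged. A rough sketch of that shape, where the simplified error handling is an assumption rather than the project's actual logic:

```rust
use std::collections::HashMap;

// Sketch: drain the spawned handles and fold each engine's successful
// results into one map. `FutureVec` and `RawSearchResult` are the types
// named in the hunk above; silently dropping failed tasks is a
// simplification for illustration.
async fn merge_results(tasks: FutureVec) -> HashMap<String, RawSearchResult> {
    let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
    for task in tasks {
        if let Ok(Ok(results)) = task.await {
            result_map.extend(results);
        }
    }
    result_map
}
```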

View File

@ -146,6 +146,7 @@ async fn results(
config.aggregator.random_delay,
config.debug,
cookie_value.engines,
config.request_timeout,
)
.await?
}
@ -156,6 +157,7 @@ async fn results(
config.aggregator.random_delay,
config.debug,
config.upstream_search_engines.clone(),
config.request_timeout,
)
.await?
}

View File

@ -8,6 +8,7 @@ binding_ip = "127.0.0.1" -- IP address on which the server should be launched.
production_use = false -- whether to use production mode or not (in other words, this option should be set to true if the app is hosted on a server to provide a service to a large number of users (more than one)).
-- If production_use is set to true,
-- there will be a random delay before sending the request to the search engines; this is to prevent DDoSing the upstream search engines with a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-- ### Website ###
-- The different colorschemes provided are: