Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-22 05:58:21 -05:00)
✨ feat: provide the functionality to use the new config option

commit 2d47e8d730
parent 6d3396b6f9
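
This commit threads the new `request_timeout` option (seconds, stored as a `u8`) through the whole search pipeline: the `SearchEngine` trait and both engine implementations (DuckDuckGo, Searx) gain a `request_timeout: u8` parameter, the aggregator forwards the value into each spawned engine task, and the route handlers read it from `config.request_timeout`. The previously hard-coded 30-second reqwest timeout is replaced by the configured value. As a rough, self-contained sketch of the underlying pattern (`fetch_with_timeout` is a hypothetical stand-in, not the project's API):

    use std::time::Duration;

    // A per-request timeout, read from configuration, passed down to the
    // reqwest call instead of a hard-coded 30 seconds.
    async fn fetch_with_timeout(url: &str, request_timeout: u8) -> Result<String, reqwest::Error> {
        reqwest::Client::new()
            .get(url)
            .timeout(Duration::from_secs(request_timeout as u64)) // widen u8 -> u64 for Duration
            .send()
            .await?
            .text()
            .await
    }

    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        // 60 stands in for the user's configured request_timeout value.
        let body = fetch_with_timeout("https://example.com", 60).await?;
        println!("fetched {} bytes", body.len());
        Ok(())
    }
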
@@ -41,6 +41,7 @@ impl SearchEngine for DuckDuckGo {
         query: String,
         page: u32,
         user_agent: String,
+        request_timeout: u8,
     ) -> Result<HashMap<String, RawSearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
         // so that upstream server receives valid page number.
@@ -90,7 +91,7 @@ impl SearchEngine for DuckDuckGo {
         );

         let document: Html = Html::parse_document(
-            &DuckDuckGo::fetch_html_from_upstream(self, url, header_map).await?,
+            &DuckDuckGo::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
         );

         let no_result: Selector = Selector::parse(".no-results")
@@ -50,11 +50,12 @@ pub trait SearchEngine {
         &self,
         url: String,
         header_map: reqwest::header::HeaderMap,
+        request_timeout: u8,
     ) -> Result<String, EngineError> {
         // fetch the html from upstream search engine
         Ok(reqwest::Client::new()
             .get(url)
-            .timeout(Duration::from_secs(30)) // Add timeout to request to avoid DDOSing the server
+            .timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .headers(header_map) // add spoofed headers to emulate human behaviour
             .send()
             .await
@@ -71,5 +72,6 @@ pub trait SearchEngine {
         query: String,
         page: u32,
         user_agent: String,
+        request_timeout: u8,
     ) -> Result<HashMap<String, RawSearchResult>, EngineError>;
 }
@@ -4,7 +4,7 @@

 use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};
-use std::collections::HashMap;
+use std::{collections::HashMap};

 use crate::results::aggregation_models::RawSearchResult;

@@ -40,6 +40,7 @@ impl SearchEngine for Searx {
         query: String,
         page: u32,
         user_agent: String,
+        request_timeout: u8,
     ) -> Result<HashMap<String, RawSearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
         // so that upstream server receives valid page number.
@@ -70,8 +71,9 @@ impl SearchEngine for Searx {
         );
         header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse().into_report().change_context(EngineError::UnexpectedError)?);

-        let document: Html =
-            Html::parse_document(&Searx::fetch_html_from_upstream(self, url, header_map).await?);
+        let document: Html = Html::parse_document(
+            &Searx::fetch_html_from_upstream(self, url, header_map, request_timeout).await?,
+        );

         let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
             .map_err(|_| Report::new(EngineError::UnexpectedError))
@@ -64,6 +64,7 @@ pub async fn aggregate(
     random_delay: bool,
     debug: bool,
     upstream_search_engines: Vec<String>,
+    request_timeout: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
@@ -92,9 +93,11 @@ pub async fn aggregate(
         .map(|search_engine| {
             let query: String = query.clone();
             let user_agent: String = user_agent.clone();
-            tokio::spawn(
-                async move { search_engine.results(query, page, user_agent.clone()).await },
-            )
+            tokio::spawn(async move {
+                search_engine
+                    .results(query, page, user_agent.clone(), request_timeout)
+                    .await
+            })
         })
         .collect();

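One detail worth noting in the aggregator hunk above: `query` and `user_agent` are `String`s and must be cloned before each `tokio::spawn`, while `request_timeout` crosses into every task without a clone because `u8` is `Copy`, so each `async move` block captures its own copy. A standalone illustration (toy values, not project code):

    #[tokio::main]
    async fn main() {
        let request_timeout: u8 = 30; // Copy: duplicated into each task for free
        let query = String::from("rust"); // not Copy: must be cloned per task
        let handles: Vec<_> = (0..3)
            .map(|i| {
                let query = query.clone();
                tokio::spawn(async move {
                    // `i` and `request_timeout` are copied in; `query` was moved in
                    println!("task {i}: query={query}, timeout={request_timeout}s");
                })
            })
            .collect();
        for handle in handles {
            handle.await.unwrap();
        }
    }
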
@@ -146,6 +146,7 @@ async fn results(
             config.aggregator.random_delay,
             config.debug,
             cookie_value.engines,
+            config.request_timeout,
         )
         .await?
     }
@@ -156,6 +157,7 @@ async fn results(
             config.aggregator.random_delay,
             config.debug,
             config.upstream_search_engines.clone(),
+            config.request_timeout,
         )
         .await?
     }
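
For reference, the routes hunks imply a configuration shape along these lines. The field names come straight from the diff (`config.debug`, `config.aggregator.random_delay`, `config.upstream_search_engines`, `config.request_timeout`); the struct layout itself is an assumption, and the real websurfx Config may differ:

    // Assumed shape only: fields mirror the accesses in the routes hunks.
    pub struct AggregatorConfig {
        pub random_delay: bool,
    }

    pub struct Config {
        pub debug: bool,
        pub upstream_search_engines: Vec<String>,
        pub aggregator: AggregatorConfig,
        pub request_timeout: u8, // seconds; the new option this commit starts using
    }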