Mirror of https://github.com/neon-mmd/websurfx.git
♻️ refactor: code to take prebuilt client to request search results (#384)
This commit is contained in: commit fe74f2eef7 (parent b42adaa5a3)
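The refactor threads a prebuilt reqwest::Client through every engine in place of the old request_timeout: u8 parameter, so the timeout is configured once on a shared client and connections can be pooled across requests instead of being re-established per engine call. Below is a minimal sketch of how such a client might be built; the helper name is hypothetical and not part of this diff, with the timeout value carried over from the old parameter.

use std::time::Duration;

use reqwest::Client;

/// Hypothetical helper: build the one shared `Client`, folding in the
/// timeout that each engine previously received as `request_timeout: u8`.
fn build_shared_client(request_timeout: u8) -> Client {
    Client::builder()
        // The per-request timeout now lives on the client itself.
        .timeout(Duration::from_secs(request_timeout as u64))
        .build()
        .expect("a client with only a timeout configured should always build")
}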
src/engines/brave.rs:

@@ -4,7 +4,7 @@
 
 use std::collections::HashMap;
 
-use reqwest::header::HeaderMap;
+use reqwest::{header::HeaderMap, Client};
 use scraper::Html;
 
 use crate::models::aggregation_models::SearchResult;
@@ -42,7 +42,7 @@ impl SearchEngine for Brave {
         query: &str,
         page: u32,
         user_agent: &str,
-        request_timeout: u8,
+        client: &Client,
         safe_search: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         let url = format!("https://search.brave.com/search?q={query}&offset={page}");
@@ -68,7 +68,7 @@ impl SearchEngine for Brave {
             .change_context(EngineError::UnexpectedError)?;
 
         let document: Html = Html::parse_document(
-            &Brave::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
+            &Brave::fetch_html_from_upstream(self, &url, header_map, client).await?,
         );
 
         if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
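For context, the shared fetch_html_from_upstream helper that these call sites use presumably just forwards to the prebuilt client now. Its body is not part of this diff, so the sketch below is an assumption; EngineError comes from the crate's engine models, and the error-stack change_context pattern is borrowed from the hunks above.

use error_stack::{Result, ResultExt};
use reqwest::{header::HeaderMap, Client};

// Sketch only: issue the request through the shared client instead of
// constructing a fresh one (with its own timeout) on every call.
async fn fetch_html_from_upstream(
    url: &str,
    header_map: HeaderMap,
    client: &Client,
) -> Result<String, EngineError> {
    client
        .get(url)
        .headers(header_map)
        .send()
        .await
        .change_context(EngineError::UnexpectedError)?
        .text()
        .await
        .change_context(EngineError::UnexpectedError)
}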
src/engines/duckduckgo.rs:

@@ -5,6 +5,7 @@
 use std::collections::HashMap;
 
 use reqwest::header::HeaderMap;
+use reqwest::Client;
 use scraper::Html;
 
 use crate::models::aggregation_models::SearchResult;
@@ -44,7 +45,7 @@ impl SearchEngine for DuckDuckGo {
         query: &str,
         page: u32,
         user_agent: &str,
-        request_timeout: u8,
+        client: &Client,
         _safe_search: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
@@ -76,7 +77,7 @@ impl SearchEngine for DuckDuckGo {
             .change_context(EngineError::UnexpectedError)?;
 
         let document: Html = Html::parse_document(
-            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
+            &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, client).await?,
         );
 
         if self.parser.parse_for_no_results(&document).next().is_some() {
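The same substitution appears in all three parameter lists, which suggests the results method on the shared SearchEngine trait now reads roughly as follows. This signature is reconstructed from the hunks in this diff and is not shown verbatim here; the async_trait attribute is an assumption.

use std::collections::HashMap;

use reqwest::Client;

// Reconstructed trait signature: `client: &Client` replaces the old
// `request_timeout: u8` across every implementor.
#[async_trait::async_trait]
pub trait SearchEngine: Sync + Send {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError>;
}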
src/engines/searx.rs:

@@ -3,6 +3,7 @@
 //! number if provided.
 
 use reqwest::header::HeaderMap;
+use reqwest::Client;
 use scraper::Html;
 use std::collections::HashMap;
 
@@ -40,7 +41,7 @@ impl SearchEngine for Searx {
         query: &str,
         page: u32,
         user_agent: &str,
-        request_timeout: u8,
+        client: &Client,
         mut safe_search: u8,
     ) -> Result<HashMap<String, SearchResult>, EngineError> {
         // Page number can be missing or empty string and so appropriate handling is required
@@ -68,7 +69,7 @@ impl SearchEngine for Searx {
             .change_context(EngineError::UnexpectedError)?;
 
         let document: Html = Html::parse_document(
-            &Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
+            &Searx::fetch_html_from_upstream(self, &url, header_map, client).await?,
         );
 
         if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(1) {
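On the calling side, the aggregator can now build the client once and hand a reference to every engine. The fragment below is a hypothetical illustration of that pattern; the aggregate function, the build_shared_client helper from earlier, and the literal timeout and safe-search arguments are all assumptions, not code from this commit.

use std::collections::HashMap;

// Hypothetical aggregator fragment: one client, many engines.
async fn aggregate(
    query: &str,
    page: u32,
    user_agent: &str,
    engines: &[Box<dyn SearchEngine>],
) -> HashMap<String, SearchResult> {
    let client = build_shared_client(30);
    let mut aggregated: HashMap<String, SearchResult> = HashMap::new();
    for engine in engines {
        // Each engine borrows the same client, so connections are reused.
        if let Ok(results) = engine.results(query, page, user_agent, &client, 0).await {
            aggregated.extend(results);
        }
    }
    aggregated
}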