
Add Wikipedia as a search engine

Currently it only searches the English Wikipedia, but it can be
customized to use other language editions.
Zsombor Gegesy 2024-11-27 01:06:14 +01:00
parent ef0ae2f0aa
commit c23d5c99ff
7 changed files with 136 additions and 12 deletions

src/engines/bing.rs

@@ -15,6 +15,7 @@ use crate::models::engine_models::{EngineError, SearchEngine};
 use error_stack::{Report, Result, ResultExt};

+use super::common::build_cookie;
 use super::search_result_parser::SearchResultParser;

 /// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -73,10 +74,7 @@ impl SearchEngine for Bing {
             ("_UR=QS=0&TQS", "0"),
         ];

-        let mut cookie_string = String::new();
-        for (k, v) in &query_params {
-            cookie_string.push_str(&format!("{k}={v}; "));
-        }
+        let cookie_string = build_cookie(&query_params);

         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),

src/engines/common.rs (new file, 23 lines)

@@ -0,0 +1,23 @@
//! This module provides common functionalities for engines

/**
 * Build a query from a list of key value pairs.
 */
pub fn build_query(query_params: &[(&str, &str)]) -> String {
    let mut query_params_string = String::new();
    for (k, v) in query_params {
        query_params_string.push_str(&format!("&{k}={v}"));
    }
    query_params_string
}

/**
 * Build a cookie from a list of key value pairs.
 */
pub fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
    let mut cookie_string = String::new();
    for (k, v) in cookie_params {
        cookie_string.push_str(&format!("{k}={v}; "));
    }
    cookie_string
}
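
For a quick sanity check of what these helpers produce, here is a sketch of a unit test that could sit in this module; it is not part of the commit, and the key/value pairs below are invented for illustration:

#[cfg(test)]
mod tests {
    use super::{build_cookie, build_query};

    #[test]
    fn helpers_join_key_value_pairs() {
        // Illustrative pairs only; not taken from any real engine.
        let params = [("q", "rust"), ("page", "2")];

        // build_query prefixes every pair with '&', so callers can append
        // the result directly after the '?' of a URL.
        assert_eq!(build_query(&params), "&q=rust&page=2");

        // build_cookie joins pairs with "; " and keeps a trailing separator,
        // matching the strings Bing and Mojeek previously built inline.
        assert_eq!(build_cookie(&params), "q=rust; page=2; ");
    }
}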

src/engines/mod.rs

@@ -5,9 +5,11 @@
 pub mod bing;
 pub mod brave;
+pub mod common;
 pub mod duckduckgo;
 pub mod librex;
 pub mod mojeek;
 pub mod search_result_parser;
 pub mod searx;
 pub mod startpage;
+pub mod wikipedia;

src/engines/mojeek.rs

@@ -14,6 +14,7 @@ use crate::models::engine_models::{EngineError, SearchEngine};
 use error_stack::{Report, Result, ResultExt};

+use super::common::{build_cookie, build_query};
 use super::search_result_parser::SearchResultParser;

 /// A new Mojeek engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -107,10 +108,7 @@ impl SearchEngine for Mojeek {
             ("safe", &safe),
         ];

-        let mut query_params_string = String::new();
-        for (k, v) in &query_params {
-            query_params_string.push_str(&format!("&{k}={v}"));
-        }
+        let query_params_string = build_query(&query_params);

         let url: String = match page {
             0 => {
@@ -123,10 +121,7 @@ impl SearchEngine for Mojeek {
             }
         };

-        let mut cookie_string = String::new();
-        for (k, v) in &query_params {
-            cookie_string.push_str(&format!("{k}={v}; "));
-        }
+        let cookie_string = build_cookie(&query_params);

         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),

src/engines/wikipedia.rs (new file, 101 lines)

@@ -0,0 +1,101 @@
//! The `wikipedia` module handles the scraping of results from wikipedia
//! with user provided query and with a page number if provided.

use std::collections::HashMap;

use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;

use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::{EngineError, SearchEngine};
use error_stack::{Report, Result, ResultExt};

use super::common::build_query;
use super::search_result_parser::SearchResultParser;

/// A new Wikipedia engine type defined in-order to implement the `SearchEngine` trait which allows to
/// reduce code duplication as well as allows to create vector of different search engines easily.
pub struct Wikipedia {
    /// The parser, used to interpret the search result.
    parser: SearchResultParser,
    /// The id of the engine, equals to 'wikipedia-' + language
    id: String,
    /// The host where wikipedia can be accessed.
    host: String,
}

impl Wikipedia {
    /// Creates the Wikipedia parser.
    pub fn new(language: &str) -> Result<Self, EngineError> {
        let host = format!("https://{}.wikipedia.org", &language);
        let id = format!("wikipedia-{}", &language);
        Ok(Self {
            parser: SearchResultParser::new(
                "p.mw-search-nonefound",
                ".mw-search-results li.mw-search-result",
                ".mw-search-result-heading a",
                ".mw-search-result-heading a",
                ".searchresult",
            )?,
            id,
            host,
        })
    }
}

#[async_trait::async_trait]
impl SearchEngine for Wikipedia {
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        client: &Client,
        _safe_search: u8,
    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            ("Referer".to_string(), self.host.to_string()),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let offset = (page * 20).to_string();
        let query_params: Vec<(&str, &str)> = vec![
            ("limit", "20"),
            ("offset", &offset),
            ("profile", "default"),
            ("search", query),
            ("title", "Special:Search"),
            ("ns0", "1"),
        ];

        let query_params_string = build_query(&query_params);

        let url: String = format!("{}/w/index.php?{}", self.host, query_params_string);

        let document: Html = Html::parse_document(
            &Wikipedia::fetch_html_from_upstream(self, &url, header_map, client).await?,
        );

        if self.parser.parse_for_no_results(&document).next().is_some() {
            return Err(Report::new(EngineError::EmptyResultSet));
        }

        // scrape all the results from the html
        self.parser
            .parse_for_results(&document, |title, url, desc| {
                let found_url = url.attr("href");
                found_url.map(|relative_url| {
                    SearchResult::new(
                        title.inner_html().trim(),
                        &format!("{}{relative_url}", self.host),
                        desc.inner_html().trim(),
                        &[&self.id],
                    )
                })
            })
    }
}
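
To make the request construction concrete, here is a sketch, derived by hand from the results method above rather than from a live run, of the URL the engine would fetch for page 1 of the invented query "rust" against the default English host:

fn main() {
    // Hand-derived from Wikipedia::results; illustrative only.
    let host = "https://en.wikipedia.org";
    let page: u32 = 1;
    let offset = (page * 20).to_string(); // "20"

    // build_query prefixes every pair with '&', so the final URL carries a
    // stray '&' directly after the '?'; the code does not strip it.
    let url = format!(
        "{host}/w/index.php?&limit=20&offset={offset}&profile=default&search=rust&title=Special:Search&ns0=1"
    );
    println!("{url}");
}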

src/models/engine_models.rs

@@ -206,6 +206,10 @@ impl EngineHandler
                 let engine = crate::engines::bing::Bing::new()?;
                 ("bing", Box::new(engine))
             }
+            "wikipedia" => {
+                let engine = crate::engines::wikipedia::Wikipedia::new("en")?;
+                ("wikipedia", Box::new(engine))
+            }
             _ => {
                 return Err(Report::from(EngineError::NoSuchEngineFound(
                     engine_name.to_string(),
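
Only the English edition is registered by this commit. If other language editions were exposed later, an extra arm mirroring the one above might look like the following sketch; the "wikipedia-de" engine name is invented for illustration and is not part of the commit:

            "wikipedia-de" => {
                // Hypothetical: Wikipedia::new only varies the host and id,
                // so "de" would target https://de.wikipedia.org.
                let engine = crate::engines::wikipedia::Wikipedia::new("de")?;
                ("wikipedia-de", Box::new(engine))
            }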

websurfx/config.lua

@@ -74,6 +74,7 @@ upstream_search_engines = {
     LibreX = false,
     Mojeek = false,
     Bing = false,
+    Wikipedia = true,
 } -- select the upstream search engines from which the results should be fetched.
 proxy = nil -- Proxy to send outgoing requests through. Set to nil to disable.