diff --git a/.mergify.yml b/.mergify.yml
index 8c2f886..05da0ed 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -1,8 +1,10 @@
 pull_request_rules:
   - name: Automatic merge on approval
     conditions:
-      - "status-success=checks/approved"
       - "#approved-reviews-by>=2"
+      - check-success=build (stable)
+      - check-success=CodeFactor
+      - check-success=Rust project
     actions:
       queue:
         method: squash
diff --git a/.stylelintrc.json b/.stylelintrc.json
index 9019f4f..05ffdd4 100644
--- a/.stylelintrc.json
+++ b/.stylelintrc.json
@@ -1,10 +1,10 @@
 {
-   "extends": "stylelint-config-standard",
+  "extends": "stylelint-config-standard",
   "rules": {
     "alpha-value-notation": "number",
     "selector-class-pattern": null
   },
-"overrides": [
+  "overrides": [
     {
       "files": ["*.js"],
       "customSyntax": "postcss-lit"
diff --git a/Cargo.lock b/Cargo.lock
index 11b8343..1184f29 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2899,9 +2899,9 @@ dependencies = [
 
 [[package]]
 name = "rustix"
-version = "0.38.15"
+version = "0.38.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531"
+checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"
 dependencies = [
  "bitflags 2.4.0",
  "errno",
@@ -4042,7 +4042,7 @@ checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc"
 
 [[package]]
 name = "websurfx"
-version = "1.0.11"
+version = "1.2.1"
 dependencies = [
  "actix-cors",
  "actix-files",
diff --git a/Cargo.toml b/Cargo.toml
index 6c4b287..b8f6e5d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.0.11"
+version = "1.2.1"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
diff --git a/dev.Dockerfile b/dev.Dockerfile
new file mode 100644
index 0000000..412283e
--- /dev/null
+++ b/dev.Dockerfile
@@ -0,0 +1,15 @@
+# Create Builder image
+FROM --platform=$BUILDPLATFORM rust:1.73.0-alpine3.18
+
+# Install required dependencies
+RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base
+
+RUN cargo install cargo-watch --locked
+
+# Create project directory
+RUN mkdir -p /project
+WORKDIR /project
+
+ENV RUSTFLAGS="-C target-feature=-crt-static"
+
+ENTRYPOINT ["cargo"]
diff --git a/dev.docker-compose.yml b/dev.docker-compose.yml
new file mode 100644
index 0000000..f242849
--- /dev/null
+++ b/dev.docker-compose.yml
@@ -0,0 +1,26 @@
+---
+version: "3.9"
+services:
+  redis:
+    container_name: redis
+    image: redis:6.2.5-alpine
+    tty: true
+    hostname: surfx-redis
+  websurx:
+    container_name: websurx-dev
+    image: websurfx:dev
+    working_dir: /project
+    tty: true
+    build:
+      context: .
+      dockerfile: dev.Dockerfile
+    ports:
+      - 8080:8080
+    volumes:
+      - type: bind
+        source: .
+        target: /project
+    command:
+      - watch
+      - -x
+      - run
diff --git a/src/engines/brave.rs b/src/engines/brave.rs
new file mode 100644
index 0000000..5c7c126
--- /dev/null
+++ b/src/engines/brave.rs
@@ -0,0 +1,95 @@
+//! The `brave` module handles the scraping of results from the brave search engine
+//! by querying the upstream brave search engine with user provided query and with a page
+//! number if provided.
+
+use std::collections::HashMap;
+
+use reqwest::header::HeaderMap;
+use scraper::Html;
+
+use crate::models::aggregation_models::SearchResult;
+use error_stack::{Report, Result, ResultExt};
+
+use crate::models::engine_models::{EngineError, SearchEngine};
+
+use super::search_result_parser::SearchResultParser;
+
+/// Scrapes the results from the Brave search engine.
+pub struct Brave {
+    /// Utilises generic logic for parsing search results.
+    parser: SearchResultParser,
+}
+
+impl Brave {
+    /// Creates the Brave parser.
+    pub fn new() -> Result<Self, EngineError> {
+        Ok(Self {
+            parser: SearchResultParser::new(
+                "#results h4",
+                "#results [data-pos]",
+                "a > .url",
+                "a",
+                ".snippet-description",
+            )?,
+        })
+    }
+}
+
+#[async_trait::async_trait]
+impl SearchEngine for Brave {
+    async fn results(
+        &self,
+        query: &str,
+        page: u32,
+        user_agent: &str,
+        request_timeout: u8,
+        safe_search: u8,
+    ) -> Result<HashMap<String, SearchResult>, EngineError> {
+        let url = format!("https://search.brave.com/search?q={query}&offset={page}");
+
+        let safe_search_level = match safe_search {
+            0 => "off",
+            1 => "moderate",
+            _ => "strict",
+        };
+
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("USER_AGENT".to_string(), user_agent.to_string()),
+            (
+                "CONTENT_TYPE".to_string(),
+                "application/x-www-form-urlencoded".to_string(),
+            ),
+            ("REFERER".to_string(), "https://google.com/".to_string()),
+            (
+                "COOKIE".to_string(),
+                format!("safe_search={safe_search_level}"),
+            ),
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
+
+        let document: Html = Html::parse_document(
+            &Brave::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
+        );
+
+        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {
+            if no_result_msg
+                .inner_html()
+                .contains("Not many great matches came back for your search")
+            {
+                return Err(Report::new(EngineError::EmptyResultSet));
+            }
+        }
+
+        self.parser
+            .parse_for_results(&document, |title, url, desc| {
+                url.value().attr("href").map(|url| {
+                    SearchResult::new(
+                        title.text().collect::<Vec<&str>>().join("").trim(),
+                        url.trim(),
+                        desc.inner_html().trim(),
+                        &["brave"],
+                    )
+                })
+            })
+    }
+}
diff --git a/src/engines/mod.rs b/src/engines/mod.rs
index 39b50c8..2892445 100644
--- a/src/engines/mod.rs
+++ b/src/engines/mod.rs
@@ -3,6 +3,7 @@
 //! provide a standard functions to be implemented for all the upstream search engine handling
 //! code. Moreover, it also provides a custom error for the upstream search engine handling code.
 
+pub mod brave;
 pub mod duckduckgo;
 pub mod search_result_parser;
 pub mod searx;
diff --git a/src/models/engine_models.rs b/src/models/engine_models.rs
index 05b5a11..98367e8 100644
--- a/src/models/engine_models.rs
+++ b/src/models/engine_models.rs
@@ -150,6 +150,10 @@ impl EngineHandler {
                 let engine = crate::engines::searx::Searx::new()?;
                 ("searx", Box::new(engine))
             }
+            "brave" => {
+                let engine = crate::engines::brave::Brave::new()?;
+                ("brave", Box::new(engine))
+            }
             _ => {
                 return Err(Report::from(EngineError::NoSuchEngineFound(
                     engine_name.to_string(),
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 09b418d..eeeb638 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -52,4 +52,5 @@ redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which th
 upstream_search_engines = {
     DuckDuckGo = true,
     Searx = false,
+    Brave = false,
 } -- select the upstream search engines from which the results should be fetched.
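
Usage note: the patch registers Brave as a selectable upstream engine but leaves the new `Brave` flag in websurfx/config.lua set to `false`. A minimal sketch of enabling it (the other values simply mirror the defaults shown in the hunk above):

    upstream_search_engines = {
        DuckDuckGo = true,
        Searx = false,
        Brave = true,
    } -- select the upstream search engines from which the results should be fetched.

For local testing, the new dev.docker-compose.yml can be brought up with `docker compose -f dev.docker-compose.yml up --build`; the dev container's command resolves to `cargo watch -x run` (the image's ENTRYPOINT is `cargo`) and port 8080 is published to the host.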