0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-22 14:08:23 -05:00

Merge branch 'rolling' into rolling

This commit is contained in:
alamin655 2023-10-19 18:03:00 +05:30 committed by GitHub
commit b0ece9b921
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 151 additions and 7 deletions

View File

@ -1,8 +1,10 @@
pull_request_rules: pull_request_rules:
- name: Automatic merge on approval - name: Automatic merge on approval
conditions: conditions:
- "status-success=checks/approved"
- "#approved-reviews-by>=2" - "#approved-reviews-by>=2"
- check-success=build (stable)
- check-success=CodeFactor
- check-success=Rust project
actions: actions:
queue: queue:
method: squash method: squash

View File

@ -4,7 +4,7 @@
"alpha-value-notation": "number", "alpha-value-notation": "number",
"selector-class-pattern": null "selector-class-pattern": null
}, },
"overrides": [ "overrides": [
{ {
"files": ["*.js"], "files": ["*.js"],
"customSyntax": "postcss-lit" "customSyntax": "postcss-lit"

6
Cargo.lock generated
View File

@ -2899,9 +2899,9 @@ dependencies = [
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.15" version = "0.38.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531" checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"
dependencies = [ dependencies = [
"bitflags 2.4.0", "bitflags 2.4.0",
"errno", "errno",
@ -4042,7 +4042,7 @@ checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc"
[[package]] [[package]]
name = "websurfx" name = "websurfx"
version = "1.0.11" version = "1.2.1"
dependencies = [ dependencies = [
"actix-cors", "actix-cors",
"actix-files", "actix-files",

View File

@ -1,6 +1,6 @@
[package] [package]
name = "websurfx" name = "websurfx"
version = "1.0.11" version = "1.2.1"
edition = "2021" edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind." description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx" repository = "https://github.com/neon-mmd/websurfx"

15
dev.Dockerfile Normal file
View File

@ -0,0 +1,15 @@
# Create Builder image
FROM --platform=$BUILDPLATFORM rust:1.73.0-alpine3.18
# Install required dependencies
RUN apk add --no-cache alpine-sdk musl-dev g++ make libcrypto3 libressl-dev perl build-base
RUN cargo install cargo-watch --locked
# Create project directory
RUN mkdir -p /project
WORKDIR /project
ENV RUSTFLAGS="-C target-feature=-crt-static"
ENTRYPOINT ["cargo"]

26
dev.docker-compose.yml Normal file
View File

@ -0,0 +1,26 @@
---
version: "3.9"
services:
redis:
container_name: redis
image: redis:6.2.5-alpine
tty: true
hostname: surfx-redis
websurx:
container_name: websurx-dev
image: websurfx:dev
working_dir: /project
tty: true
build:
context: .
dockerfile: dev.Dockerfile
ports:
- 8080:8080
volumes:
- type: bind
source: .
target: /project
command:
- watch
- -x
- run

95
src/engines/brave.rs Normal file
View File

@ -0,0 +1,95 @@
//! The `brave` module handles the scraping of results from the brave search engine
//! by querying the upstream brave search engine with user provided query and with a page
//! number if provided.
use std::collections::HashMap;
use reqwest::header::HeaderMap;
use scraper::Html;
use crate::models::aggregation_models::SearchResult;
use error_stack::{Report, Result, ResultExt};
use crate::models::engine_models::{EngineError, SearchEngine};
use super::search_result_parser::SearchResultParser;
/// Scrapes the results from the Brave search engine.
pub struct Brave {
    /// Utilises generic logic for parsing search results.
    /// Holds the compiled CSS selectors (no-results, result container,
    /// url, title, description) built in [`Brave::new`].
    parser: SearchResultParser,
}
impl Brave {
/// Creates the Brave parser.
pub fn new() -> Result<Brave, EngineError> {
Ok(Self {
parser: SearchResultParser::new(
"#results h4",
"#results [data-pos]",
"a > .url",
"a",
".snippet-description",
)?,
})
}
}
#[async_trait::async_trait]
impl SearchEngine for Brave {
    /// Queries the upstream Brave search engine and parses the returned page
    /// into a map keyed by result URL.
    ///
    /// * `query` — the user-provided search query.
    /// * `page` — page number, forwarded as Brave's `offset` parameter.
    /// * `user_agent` — value for the `User-Agent` request header.
    /// * `request_timeout` — upstream request timeout (seconds, presumably —
    ///   TODO confirm against `fetch_html_from_upstream`).
    /// * `safe_search` — safe-search level: 0 = off, 1 = moderate, otherwise strict.
    ///
    /// # Errors
    ///
    /// Returns [`EngineError::EmptyResultSet`] when Brave reports no matches,
    /// or [`EngineError::UnexpectedError`] when the headers cannot be built or
    /// the upstream fetch fails.
    async fn results(
        &self,
        query: &str,
        page: u32,
        user_agent: &str,
        request_timeout: u8,
        safe_search: u8,
    ) -> Result<HashMap<String, SearchResult>, EngineError> {
        let url = format!("https://search.brave.com/search?q={query}&offset={page}");

        // Map the numeric safe-search level onto Brave's cookie values.
        let safe_search_level = match safe_search {
            0 => "off",
            1 => "moderate",
            _ => "strict",
        };

        // FIX: the original passed "USER_AGENT"/"CONTENT_TYPE"/"REFERER"/"COOKIE"
        // as header names. `http::HeaderName` treats '_' as a literal valid
        // header-name byte, so the request would carry e.g. a `user_agent`
        // header instead of `User-Agent` and the upstream engine would never
        // see the intended headers. Use the canonical hyphenated names.
        let header_map = HeaderMap::try_from(&HashMap::from([
            ("User-Agent".to_string(), user_agent.to_string()),
            (
                "Content-Type".to_string(),
                "application/x-www-form-urlencoded".to_string(),
            ),
            ("Referer".to_string(), "https://google.com/".to_string()),
            (
                "Cookie".to_string(),
                format!("safe_search={safe_search_level}"),
            ),
        ]))
        .change_context(EngineError::UnexpectedError)?;

        let document: Html = Html::parse_document(
            &Brave::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
        );

        // Brave renders a dedicated message when nothing matched; surface that
        // as an explicit empty-result error instead of an empty map.
        // (`.next()` replaces the original `.nth(0)` — clippy::iter_nth_zero.)
        if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).next() {
            if no_result_msg
                .inner_html()
                .contains("Not many great matches came back for your search")
            {
                return Err(Report::new(EngineError::EmptyResultSet));
            }
        }

        self.parser
            .parse_for_results(&document, |title, url, desc| {
                // Results whose anchor lacks an `href` attribute are skipped.
                url.value().attr("href").map(|url| {
                    SearchResult::new(
                        title.text().collect::<Vec<_>>().join("").trim(),
                        url.trim(),
                        desc.inner_html().trim(),
                        &["brave"],
                    )
                })
            })
    }
}

View File

@ -3,6 +3,7 @@
//! provide a standard functions to be implemented for all the upstream search engine handling //! provide a standard functions to be implemented for all the upstream search engine handling
//! code. Moreover, it also provides a custom error for the upstream search engine handling code. //! code. Moreover, it also provides a custom error for the upstream search engine handling code.
pub mod brave;
pub mod duckduckgo; pub mod duckduckgo;
pub mod search_result_parser; pub mod search_result_parser;
pub mod searx; pub mod searx;

View File

@ -150,6 +150,10 @@ impl EngineHandler {
let engine = crate::engines::searx::Searx::new()?; let engine = crate::engines::searx::Searx::new()?;
("searx", Box::new(engine)) ("searx", Box::new(engine))
} }
"brave" => {
let engine = crate::engines::brave::Brave::new()?;
("brave", Box::new(engine))
}
_ => { _ => {
return Err(Report::from(EngineError::NoSuchEngineFound( return Err(Report::from(EngineError::NoSuchEngineFound(
engine_name.to_string(), engine_name.to_string(),

View File

@ -52,4 +52,5 @@ redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which th
upstream_search_engines = { upstream_search_engines = {
DuckDuckGo = true, DuckDuckGo = true,
Searx = false, Searx = false,
Brave = false,
} -- select the upstream search engines from which the results should be fetched. } -- select the upstream search engines from which the results should be fetched.