Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-10-18 14:32:52 -04:00)

Merge branch 'rolling' into check

commit 8fc1b06cca
alamin655 committed 2023-07-03 18:41:09 +05:30 (via GitHub)
13 changed files with 550 additions and 384 deletions

.gitpod.yml (new file, +45 lines)

@@ -0,0 +1,45 @@
---
image: ubuntu:latest
# Commands that will run on workspace start
tasks:
  - name: Setup, Install & Build
  - before: apt install cargo redis-server nodejs npm && cargo test
  - init: cargo install cargo-watch
  - command: redis-server --port 8080 & cargo watch -q -w "." -x "run"
# Ports to expose on workspace startup
ports:
  - name: Website
    description: Website Preview
    port: 8080
    onOpen: open-browser
# vscode IDE setup
vscode:
  extensions:
    - vadimcn.vscode-lldb
    - rust-lang.rust-analyzer
    - bungcip.better-toml
    - serayuzgur.crates
    - usernamehw.errorlens
    - DavidAnson.vscode-markdownlint
    - esbenp.prettier-vscode
    - stylelint.vscode-stylelint
    - dbaeumer.vscode-eslint
    - evgeniypeshkov.syntax-highlighter
    - redhat.vscode-yaml
    - ms-azuretools.vscode-docker
    - GitHub.vscode-github-actions
    - Catppuccin.catppuccin-vsc
    - PKief.material-icon-theme
    - tal7aouy.rainbow-bracket
    - oderwat.indent-rainbow
    - formulahendry.auto-rename-tag
    - eamodio.gitlens
github:
  prebuilds:
    master: true
    branches: true
    pullRequests: true
    pullRequestsFromForks: true
    addCheck: true
    addComment: false
    addBadge: true


@@ -14,7 +14,7 @@ Know how to fix or improve a github action?. Consider Submitting a Pull request
 ## Source Code
-You should know atleast one of the things below to start contributing:
+You should know at least one of the things below to start contributing:
 - Rust basics
 - Actix-web crate basics

Cargo.lock (generated, 749 lines changed)

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "0.13.1"
+version = "0.13.7"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -27,6 +27,7 @@ error-stack = {version="0.3.1"}
 [dev-dependencies]
 rusty-hook = "^0.11.2"
+criterion = "0.5.1"
 [profile.dev]
 opt-level = 0
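The new `criterion` dev-dependency suggests benchmarks are being introduced. As a rough, hedged sketch only — the bench file name, the function being measured, and its body below are hypothetical and not taken from this commit — a criterion benchmark for this crate would typically look like:

```rust
// benches/query_url.rs — hypothetical benchmark file, not part of this commit.
use std::hint::black_box;

use criterion::{criterion_group, criterion_main, Criterion};

// Stand-in function to benchmark; the real benchmark targets are not shown in this diff.
fn build_query_url(query: &str, page: u32) -> String {
    format!("https://html.duckduckgo.com/html/?q={query}&s={}", page * 30)
}

fn bench_build_query_url(c: &mut Criterion) {
    // Measures the cost of formatting a single upstream search URL.
    c.bench_function("build_query_url", |b| {
        b.iter(|| build_query_url(black_box("rust web framework"), black_box(3)))
    });
}

criterion_group!(benches, bench_build_query_url);
criterion_main!(benches);
```

For Cargo to hand the test harness over to criterion, the crate would also need a matching `[[bench]]` table with `harness = false` pointing at that bench file.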


@@ -16,7 +16,7 @@
 ## Author's checklist
-<!-- additional notes for reviewiers -->
+<!-- additional notes for reviewers -->
 ## Related issues


@@ -59,7 +59,7 @@
 - **Community**
   - [📊 System Requirements](#system-requirements-)
   - [🗨️ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-)
-  - [📣 More Contributers Wanted](#more-contributers-wanted-)
+  - [📣 More Contributors Wanted](#more-contributors-wanted-)
   - [💖 Supporting Websurfx](#supporting-websurfx-)
   - [📘 Documentation](#documentation-)
   - [🛣️ Roadmap](#roadmap-)
@@ -165,7 +165,7 @@ Websurfx is based on Rust due to its memory safety features, which prevents vuln
 **[⬆️ Back to Top](#--)**
-# More Contributers Wanted 📣
+# More Contributors Wanted 📣
 We are looking for more willing contributors to help grow this project. For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines and rules for making contributions.


@@ -30,8 +30,8 @@ function navigate_backward() {
   let page = parseInt(searchParams.get('page'));
   if (isNaN(page)) {
-    page = 1;
-  } else if (page > 1) {
+    page = 0;
+  } else if (page > 0) {
     page--;
   }


@@ -24,46 +24,35 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
-    pub aggregator: AggreatorConfig,
+    pub aggregator: AggregatorConfig,
     pub logging: bool,
     pub debug: bool,
 }
 /// Configuration options for the aggregator.
 #[derive(Clone)]
-pub struct AggreatorConfig {
+pub struct AggregatorConfig {
     /// Whether to introduce a random delay before sending the request to the search engine.
     pub random_delay: bool,
 }
 impl Config {
     /// A function which parses the config.lua file and puts all the parsed options in the newly
-    /// contructed Config struct and returns it.
+    /// constructed Config struct and returns it.
     ///
     /// # Error
    ///
     /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error
-    /// or io error if the config.lua file doesn't exists otherwise it returns a newly contructed
+    /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed
     /// Config struct with all the parsed config options from the parsed config file.
     pub fn parse() -> Result<Self, Box<dyn std::error::Error>> {
         Lua::new().context(|context| -> Result<Self, Box<dyn std::error::Error>> {
             let globals = context.globals();
             context
-                .load(&fs::read_to_string(
-                    Config::handle_different_config_file_path()?,
-                )?)
+                .load(&fs::read_to_string(Config::get_config_path()?)?)
                 .exec()?;
-            let production_use = globals.get::<_, bool>("production_use")?;
-            let aggregator_config = if production_use {
-                AggreatorConfig { random_delay: true }
-            } else {
-                AggreatorConfig {
-                    random_delay: false,
-                }
-            };
             Ok(Config {
                 port: globals.get::<_, u16>("port")?,
                 binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -72,7 +61,9 @@ impl Config {
                     globals.get::<_, String>("colorscheme")?,
                 ),
                 redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
-                aggregator: aggregator_config,
+                aggregator: AggregatorConfig {
+                    random_delay: globals.get::<_, bool>("production_use")?,
+                },
                 logging: globals.get::<_, bool>("logging")?,
                 debug: globals.get::<_, bool>("debug")?,
             })
@@ -90,35 +81,37 @@ impl Config {
     /// one (3).
     /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
     /// here then it returns an error as mentioned above.
-    fn handle_different_config_file_path() -> Result<String, Box<dyn std::error::Error>> {
-        if Path::new(
-            format!(
-                "{}/.config/{}/config.lua",
-                std::env::var("HOME").unwrap(),
-                COMMON_DIRECTORY_NAME
-            )
-            .as_str(),
-        )
-        .exists()
-        {
-            Ok(format!(
-                "{}/.config/{}/{}",
-                std::env::var("HOME").unwrap(),
-                COMMON_DIRECTORY_NAME,
-                CONFIG_FILE_NAME
-            ))
-        } else if Path::new(
-            format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str(),
-        )
-        .exists()
-        {
-            Ok("/etc/xdg/websurfx/config.lua".to_string())
-        } else if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
-            .exists()
-        {
-            Ok("./websurfx/config.lua".to_string())
-        } else {
-            Err("Config file not found!!".to_string().into())
-        }
+    fn get_config_path() -> Result<String, Box<dyn std::error::Error>> {
+        // check user config
+        let path = format!(
+            "{}/.config/{}/config.lua",
+            std::env::var("HOME").unwrap(),
+            COMMON_DIRECTORY_NAME
+        );
+        if Path::new(path.as_str()).exists() {
+            return Ok(format!(
+                "{}/.config/{}/{}",
+                std::env::var("HOME").unwrap(),
+                COMMON_DIRECTORY_NAME,
+                CONFIG_FILE_NAME
+            ));
+        }
+        // look for config in /etc/xdg
+        if Path::new(format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str())
+            .exists()
+        {
+            return Ok("/etc/xdg/websurfx/config.lua".to_string());
+        }
+        // use dev config
+        if Path::new(format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME).as_str()).exists()
+        {
+            return Ok("./websurfx/config.lua".to_string());
+        }
+        // if no of the configs above exist, return error
+        Err("Config file not found!!".to_string().into())
     }
 }
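For orientation, a minimal usage sketch of the simplified parser follows; the `main` wrapper and the printed fields are illustrative assumptions, not lines from this commit, and it assumes `Config` is brought into scope from the config parser module:

```rust
// Hypothetical call site: load the config once at startup and fail fast if
// none of the three config.lua locations (~/.config/websurfx, /etc/xdg/websurfx,
// ./websurfx) exists.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::parse()?;
    println!(
        "binding to {}:{}, random_delay = {}",
        config.binding_ip_addr, config.port, config.aggregator.random_delay
    );
    Ok(())
}
```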


@@ -1,5 +1,5 @@
 //! This module provides public models for handling, storing and serializing parsed config file
-//! options from config.lua by grouping them togather.
+//! options from config.lua by grouping them together.
 use serde::{Deserialize, Serialize};


@@ -36,7 +36,7 @@ pub async fn results(
     user_agent: &str,
 ) -> Result<HashMap<String, RawSearchResult>, EngineError> {
     // Page number can be missing or empty string and so appropriate handling is required
-    // so that upstream server recieves valid page number.
+    // so that upstream server receives valid page number.
     let url: String = match page {
         1 => {
             format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js")
@@ -86,7 +86,7 @@ pub async fn results(
     let results: String = reqwest::Client::new()
         .get(url)
         .timeout(Duration::from_secs(5))
-        .headers(header_map) // add spoofed headers to emulate human behaviour
+        .headers(header_map) // add spoofed headers to emulate human behavior
         .send()
         .await
         .into_report()
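The `header_map` passed above is assembled elsewhere in this engine module and is not shown in the hunk. As a hedged sketch of what building such a set of spoofed request headers can look like with reqwest — the function name, header choices, and values here are placeholders, not websurfx's actual code:

```rust
use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, COOKIE, REFERER, USER_AGENT};

// Illustrative only: build a HeaderMap that makes the request resemble an
// ordinary browser visit.
fn example_header_map(user_agent: &str) -> Result<HeaderMap, Box<dyn std::error::Error>> {
    let mut headers = HeaderMap::new();
    headers.insert(USER_AGENT, HeaderValue::from_str(user_agent)?);
    headers.insert(ACCEPT, HeaderValue::from_static("text/html"));
    headers.insert(REFERER, HeaderValue::from_static("https://google.com/"));
    headers.insert(COOKIE, HeaderValue::from_static("kl=wt-wt"));
    Ok(headers)
}
```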


@@ -13,7 +13,7 @@ use std::fmt;
 /// search engines.
 /// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
 /// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
-/// all other errors occuring within the code handling the `upstream search engines`.
+/// all other errors occurring within the code handling the `upstream search engines`.
 #[derive(Debug)]
 pub enum EngineError {
     EmptyResultSet,


@@ -17,7 +17,7 @@ use crate::engines::{duckduckgo, searx};
 /// then removes duplicate results and if two results are found to be from two or more engines
 /// then puts their names together to show the results are fetched from these upstream engines
 /// and then removes all data from the HashMap and puts into a struct of all results aggregated
-/// into a vector and also adds the query used into the struct this is neccessory because
+/// into a vector and also adds the query used into the struct this is necessary because
 /// otherwise the search bar in search remains empty if searched from the query url
 ///
 /// # Example:


@@ -1,5 +1,5 @@
 //! This module provides the functionality to handle different routes of the `websurfx`
-//! meta search engine website and provide approriate response to each route/page
+//! meta search engine website and provide appropriate response to each route/page
 //! when requested.
 use std::fs::read_to_string;
@@ -82,40 +82,16 @@ pub async fn search(
             .insert_header(("location", "/"))
             .finish())
     } else {
-        let page_url: String; // Declare the page_url variable without initializing it
-        // ...
-        let page = match params.page {
-            Some(page_number) => {
-                if page_number <= 1 {
-                    page_url = format!(
-                        "http://{}:{}/search?q={}&page={}",
-                        config.binding_ip_addr, config.port, query, 1
-                    );
-                    1
-                } else {
-                    page_url = format!(
-                        "http://{}:{}/search?q={}&page={}",
-                        config.binding_ip_addr, config.port, query, page_number
-                    );
-                    page_number
-                }
-            }
-            None => {
-                page_url = format!(
-                    "http://{}:{}{}&page={}",
-                    config.binding_ip_addr,
-                    config.port,
-                    req.uri(),
-                    1
-                );
-                1
-            }
+        let page = match &params.page {
+            Some(page) => *page,
+            None => 0,
         };
+        let page_url = format!(
+            "http://{}:{}/search?q={}&page={}",
+            config.binding_ip_addr, config.port, query, page
+        );
         // fetch the cached results json.
         let cached_results_json = redis_cache.cached_results_json(&page_url);
         // check if fetched results was indeed fetched or it was an error and if so