mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-22 05:58:21 -05:00

Merge branch 'rolling' into rolling

This commit is contained in:
Abhinav Pandey 2023-11-21 20:08:27 +05:30 committed by GitHub
commit 9706363527
54 changed files with 933 additions and 941 deletions


@ -32,7 +32,7 @@ jobs:
noCommit: true
- name: Commit & PR
uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # v4.2.4
uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # v5.0.2
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: .github/assets/CONTRIBUTORS.svg


@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: '14'
- uses: EddieHubCommunity/gh-action-open-source-labels@main


@ -63,7 +63,7 @@ jobs:
- name: Create Pull Request with applied fixes
id: cpr
if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v5.0.2
with:
token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
commit-message: "[MegaLinter] Apply linters automatic fixes"
@ -81,7 +81,7 @@ jobs:
run: sudo chown -Rc $UID .git/
- name: Commit and push applied linter fixes
if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix')
uses: stefanzweifel/git-auto-commit-action@v4
uses: stefanzweifel/git-auto-commit-action@v5
with:
branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
commit_message: "[MegaLinter] Apply linters fixes"


@ -1,9 +1,12 @@
{
{
"extends": "stylelint-config-standard",
"rules": {
"alpha-value-notation": "number",
"selector-class-pattern": null
"selector-class-pattern": null,
"no-descending-specificity": null
},
"fix": true,
"cache": true,
"overrides": [
{
"files": ["*.js"],

Cargo.lock generated

File diff suppressed because it is too large.


@ -1,6 +1,6 @@
[package]
name = "websurfx"
version = "1.2.15"
version = "1.2.28"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"
@ -13,46 +13,45 @@ bench = false
path = "src/bin/websurfx.rs"
[dependencies]
reqwest = {version="0.11.21",default-features = false,features = ["json", "rustls-tls"]}
tokio = {version="1.32.0",features=["rt-multi-thread","macros"]}
serde = {version="1.0.188",features=["derive"]}
handlebars = { version = "4.4.0", features = ["dir_source"] }
scraper = {version="0.17.1"}
actix-web = {version="4.4.0", features = ["cookies"]}
actix-files = {version="0.6.2"}
actix-cors = {version="0.6.4"}
serde_json = {version="1.0.105"}
fake-useragent = {version="0.1.3"}
env_logger = {version="0.10.0"}
log = {version="0.4.20"}
mlua = {version="0.9.1", features=["luajit", "vendored"]}
redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true}
md5 = {version="0.7.0"}
rand={version="0.8.5"}
error-stack = {version="0.4.0"}
async-trait = {version="0.1.73"}
regex = {version="1.9.4", features=["perf"]}
smallvec = {version="1.11.0", features=["union", "serde"]}
futures = {version="0.3.28"}
dhat = {version="0.3.2", optional = true}
reqwest = {version="0.11.22", default-features=false, features=["rustls-tls","brotli", "gzip"]}
tokio = {version="1.32.0",features=["rt-multi-thread","macros"], default-features = false}
serde = {version="1.0.190", default-features=false, features=["derive"]}
serde_json = {version="1.0.108", default-features=false}
maud = {version="0.25.0", default-features=false, features=["actix-web"]}
scraper = {version="0.18.1", default-features = false}
actix-web = {version="4.4.0", features = ["cookies", "macros"], default-features=false}
actix-files = {version="0.6.2", default-features=false}
actix-cors = {version="0.6.4", default-features=false}
fake-useragent = {version="0.1.3", default-features=false}
env_logger = {version="0.10.0", default-features=false}
log = {version="0.4.20", default-features=false}
mlua = {version="0.9.1", features=["luajit", "vendored"], default-features=false}
redis = {version="0.23.3", features=["tokio-comp","connection-manager"], default-features = false, optional = true}
blake3 = {version="1.5.0", default-features=false}
error-stack = {version="0.4.0", default-features=false, features=["std"]}
async-trait = {version="0.1.73", default-features=false}
regex = {version="1.9.4", features=["perf"], default-features = false}
smallvec = {version="1.11.0", features=["union", "serde"], default-features=false}
futures = {version="0.3.28", default-features=false}
dhat = {version="0.3.2", optional = true, default-features=false}
mimalloc = { version = "0.1.38", default-features = false }
async-once-cell = {version="0.5.3"}
actix-governor = {version="0.4.1"}
mini-moka = { version="0.10", optional = true}
async-once-cell = {version="0.5.3", default-features=false}
actix-governor = {version="0.5.0", default-features=false}
mini-moka = { version="0.10", optional = true, default-features=false, features=["sync"]}
[dev-dependencies]
rusty-hook = "^0.11.2"
criterion = "0.5.1"
tempfile = "3.8.0"
rusty-hook = {version="^0.11.2", default-features=false}
criterion = {version="0.5.1", default-features=false}
tempfile = {version="3.8.0", default-features=false}
[build-dependencies]
lightningcss = "1.0.0-alpha.50"
minify-js = "0.5.6"
lightningcss = {version="1.0.0-alpha.50", default-features=false, features=["grid"]}
minify-js = {version="0.5.6", default-features=false}
[profile.dev]
opt-level = 0
debug = true
split-debuginfo = '...'
split-debuginfo = 'unpacked'
debug-assertions = true
overflow-checks = true
lto = false


@ -2,14 +2,14 @@
## Installed From Source
If you have built `websurfx` from source then the configuration file will be located under project directory (codebase) at `websurfx/`
If you have built `websurfx` from the source then the configuration file will be located under the project directory (codebase) at `websurfx/`
> **Note**
> If you have built websurfx with unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
> If you have built websurfx with an unstable/rolling/edge branch then you can copy the configuration file from `websurfx/config.lua` located under the project directory (codebase) to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. _This is only available from unstable/rolling/edge version_.
## Installed From Package
If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` and make the changes there and rerun the websurfx server.
If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/`, make the changes there, and rerun the websurfx server.
Some of the configuration options provided in the file are stated below. These are subdivided into the following categories:
@ -42,13 +42,13 @@ Some of the configuration options provided in the file are stated below. These a
>
> - Level 0 - With this level no search filtering occurs.
> - Level 1 - With this level some search filtering occurs.
> - Level 2 - With this level the upstream search engines are restricted to send sensitive contents like NSFW search results, etc.
> - Level 3 - With this level the regex based filter lists is used alongside level 2 to filter more search results that have slipped in or custom results that needs to be filtered using the filter lists.
> - Level 4 - This level is similar to level 3 except in this level the regex based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
> - Level 2 - With this level the upstream search engines are restricted to sending sensitive content like NSFW search results, etc.
> - Level 3 - With this level the regex-based filter lists are used alongside level 2 to filter more search results that have slipped in or custom results that need to be filtered using the filter lists.
> - Level 4 - This level is similar to level 3 except in this level the regex-based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are a parent or someone who wants to completely disallow their kids or yourself from watching sensitive content.
## Website
- **colorscheme:** The colorscheme name which should be used for the website theme (the name should be in accordance to the colorscheme file name present in `public/static/colorschemes` folder).
- **colorscheme:** The colorscheme name which should be used for the website theme (the name should match the colorscheme file name present in the `public/static/colorschemes` folder).
> By Default we provide 12 colorschemes to choose from these are:
>
@ -65,7 +65,7 @@ Some of the configuration options provided in the file are stated below. These a
> 11. tokyo-night
> 12. tomorrow-night
- **theme:** The theme name which should be used for the website (again, the name should be in accordance to the theme file name present in `public/static/themes` folder).
- **theme:** The theme name that should be used for the website (again, the name should match the theme file name present in the `public/static/themes` folder).
> By Default we provide 1 theme to choose from these are:
>
@ -73,7 +73,7 @@ Some of the configuration options provided in the file are stated below. These a
## Cache
- **redis_url:** Redis connection url address on which the client should connect on.
- **redis_url:** The Redis connection URL that the client should connect to.
> **Note**
> This option can be commented out if you have compiled the app without the `redis-cache` feature. For more information, See [**building**](./building.md).
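
The options documented above are plain Lua globals in `config.lua`, which the server reads through `mlua` (see the `config/parser.rs` hunk later in this diff). The following is only a minimal sketch of that pattern, not the project's parser, and the option values shown are illustrative:

```rust
use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();

    // Evaluate a tiny config chunk; the real file lives at websurfx/config.lua,
    // ~/.config/websurfx/config.lua or /etc/xdg/websurfx/config.lua as described above.
    lua.load(
        r#"
        colorscheme = "catppuccin-mocha"
        theme = "simple"
        redis_url = "redis://127.0.0.1:8082"
        "#,
    )
    .exec()?;

    // Read the globals back out, the same way the project's parser uses `globals.get`.
    let globals = lua.globals();
    let colorscheme: String = globals.get("colorscheme")?;
    let theme: String = globals.get("theme")?;
    let redis_url: String = globals.get("redis_url")?;

    println!("colorscheme={colorscheme} theme={theme} redis_url={redis_url}");
    Ok(())
}
```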


@ -50,10 +50,11 @@ Before you start working on the project. You will need the following packages in
- The latest version of `cargo` installed on your system which is required to manage building and running the project. The installation instructions for this can be found [here](https://doc.rust-lang.org/cargo/getting-started/installation.html).
- The latest version of `npm` installed on your system which is required to allow the installation of other tools necessary for the project. The installation for this can be found [here](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
- The latest version of `redis` installed on your system which will be used to avoid introducing unexpected issues when working on the project. The installation for this can be found [here](https://redis.io/docs/getting-started/installation/).
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` run the following command:
- The latest version of `stylelint` should be installed on your system which will be used by the pre-commit checks to lint the code before a commit can be made to ensure better code quality. Before you install `stylelint` on your system, make sure you have `npm` installed on your system. To install `stylelint` and plugins run the following command:
```shell
$ npm i -g stylelint
$ npm i -g stylelint stylelint-config-standard postcss-lit
```
> **Note**


@ -4,10 +4,10 @@ A modern-looking, lightning-fast, privacy-respecting, secure [meta search engine
# Motivation
Most meta search engines tend to be slow, lack high level of customization and missing many features and all of them like security as they are written in unsafe languages like python, javascript, etc which tend to open a wide variety of vulnerabilities which can also sometimes pose a threat to privacy as sometimes this can be exploited and can be used to leveraged to leak out sensitive information which is never good.
Most meta search engines tend to be slow, lack a high level of customization, and miss many features. All of them lack security, as they are written in unsafe languages like Python, JavaScript, etc., which tend to open up a wide variety of vulnerabilities that can sometimes be exploited to leak sensitive information, which is never good.
# Solution
Websurfx is a project which seeks to provide privacy, security, speed and all the features which the user wants.
Websurfx is a project that seeks to provide privacy, security, speed, and all the features that the user wants.
[⬅️ Go back to Home](./README.md)


@ -1,10 +0,0 @@
{{>header this}}
<main class="error_container">
<img src="images/robot-404.svg" alt="Image of broken robot." />
<div class="error_content">
<h1>Aw! snap</h1>
<h2>404 Page Not Found!</h2>
<p>Go to <a href="/">search page</a></p>
</div>
</main>
{{>footer}}


@ -1,29 +0,0 @@
{{>header this}}
<main class="about-container">
<article >
<div>
<h1 >Websurfx</h1>
<hr size="4" width="100%" color="#a6e3a1">
</div>
<p>A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy.<br> It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads.
</p>
<h2>Some of the Top Features:</h2>
<ul><strong>Lightning fast </strong>- Results load within milliseconds for an instant search experience.</ul>
<ul><strong>Secure search</strong> - All searches are performed over an encrypted connection to prevent snooping.</ul>
<ul><strong>Ad free results</strong> - All search results are ad free and clutter free for a clean search experience.</ul>
<ul><strong>Privacy focused</strong> - Websurface does not track, store or sell your search data. Your privacy is our priority.</ul>
<ul><strong>Free and Open source</strong> - The entire project's code is open source and available for free on <a href="https://github.com/neon-mmd/websurfx">GitHub</a> under an GNU Affero General Public License.</ul>
<ul><strong>Highly customizable</strong> - Websurface comes with 9 built-in color themes and supports creating custom themes effortlessly.</ul>
</article>
<h3>Devoloped by: <a href="https://github.com/neon-mmd/websurfx">Websurfx team</a></h3>
</main>
{{>footer}}


@ -1,3 +0,0 @@
<div class="search_bar">
<input type="search" name="search-box" value="{{this.pageQuery}}" placeholder="Type to search" />
<button type="submit" onclick="searchWeb()">search</button>


@ -1,12 +0,0 @@
<div class="cookies tab">
<h1>Cookies</h1>
<p class="description">
This is the cookies are saved on your system and it contains the preferences
you chose in the settings page
</p>
<input type="text" name="cookie_field" value="" readonly />
<p class="description">
The cookies stored are not used by us for any malicious intend or for
tracking you in any way.
</p>
</div>


@ -1,32 +0,0 @@
<div class="engines tab">
<h1>Engines</h1>
<h3>select search engines</h3>
<p class="description">
Select the search engines from the list of engines that you want results
from
</p>
<div class="engine_selection">
<div class="toggle_btn">
<label class="switch">
<input type="checkbox" class="select_all" onchange="toggleAllSelection()" />
<span class="slider round"></span>
</label>
Select All
</div>
<hr />
<div class="toggle_btn">
<label class="switch">
<input type="checkbox" class="engine" />
<span class="slider round"></span>
</label>
DuckDuckGo
</div>
<div class="toggle_btn">
<label class="switch">
<input type="checkbox" class="engine" />
<span class="slider round"></span>
</label>
Searx
</div>
</div>
</div>


@ -1,16 +0,0 @@
<footer>
<div>
<span>Powered By <b>Websurfx</b></span><span>-</span><span>a lightening fast, privacy respecting, secure meta
search engine</span>
</div>
<div>
<ul>
<li><a href="https://github.com/neon-mmd/websurfx">Source Code</a></li>
<li><a href="https://github.com/neon-mmd/websurfx/issues">Issues/Bugs</a></li>
</ul>
</div>
</footer>
<script src="static/settings.js"></script>
</body>
</html>


@ -1,13 +0,0 @@
<div class="general tab active">
<h1>General</h1>
<h3>Select a safe search level</h3>
<p class="description">
Select a safe search level from the menu below to filter content based on
the level.
</p>
<select name="safe_search_levels">
<option value=0>None</option>
<option value=1>Low</option>
<option value=2>Moderate</option>
</select>
</div>


@ -1,16 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<title>Websurfx</title>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link href="static/colorschemes/{{colorscheme}}.css" rel="stylesheet" type="text/css" />
<link href="static/themes/{{theme}}.css" rel="stylesheet" type="text/css" />
</head>
<body onload="getClientSettings()">
<header>
<h1><a href="/">Websurfx</a></h1>
{{>navbar}}
</header>


@ -1,8 +0,0 @@
{{>header this}}
<main class="search-container">
<img src="../images/websurfx_logo.png" alt="Websurfx meta-search engine logo" />
{{>bar}}
</div>
</main>
<script src="static/index.js"></script>
{{>footer}}


@ -1,6 +0,0 @@
<nav>
<ul>
<li><a href="about">about</a></li>
<li><a href="settings">settings</a></li>
</ul>
</nav>


@ -1,86 +0,0 @@
{{>header this.style}}
<main class="results">
{{>search_bar this}}
<div class="results_aggregated">
{{#if results}} {{#each results}}
<div class="result">
<h1><a href="{{{this.url}}}">{{{this.title}}}</a></h1>
<small>{{{this.url}}}</small>
<p>{{{this.description}}}</p>
<div class="upstream_engines">
{{#each engine}}
<span>{{{this}}}</span>
{{/each}}
</div>
</div>
{{/each}} {{else}} {{#if disallowed}}
<div class="result_disallowed">
<div class="description">
<p>
Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
has been disallowed.
</p>
<p class="description_paragraph">Dear user,</p>
<p class="description_paragraph">
The query - <span class="user_query">{{{this.pageQuery}}}</span> - has
been blacklisted via server configuration and hence disallowed by the
server. Henceforth no results could be displayed for your query.
</p>
</div>
<img src="./images/barricade.png" alt="Image of a Barricade" />
</div>
{{else}} {{#if filtered}}
<div class="result_filtered">
<div class="description">
<p>
Your search - <span class="user_query">{{{this.pageQuery}}}</span> -
has been filtered.
</p>
<p class="description_paragraph">Dear user,</p>
<p class="description_paragraph">
All the search results contain results that has been configured to be
filtered out via server configuration and henceforth has been
completely filtered out.
</p>
</div>
<img src="./images/filter.png" alt="Image of a paper inside a funnel" />
</div>
{{else}} {{#if noEnginesSelected}}
<div class="result_engine_not_selected">
<div class="description">
<p>
No results could be fetched for your search "<span class="user_query">{{{this.pageQuery}}}</span>" .
</p>
<p class="description_paragraph">Dear user,</p>
<p class="description_paragraph">
No results could be retrieved from the upstream search engines as no
upstream search engines were selected from the settings page.
</p>
</div>
<img src="./images/no_selection.png" alt="Image of a white cross inside a red circle" />
</div>
{{else}}
<div class="result_not_found">
<p>Your search - {{{this.pageQuery}}} - did not match any documents.</p>
<p class="suggestions">Suggestions:</p>
<ul>
<li>Make sure that all words are spelled correctly.</li>
<li>Try different keywords.</li>
<li>Try more general keywords.</li>
</ul>
<img src="./images/no_results.gif" alt="Man fishing gif" />
</div>
{{/if}} {{/if}} {{/if}} {{/if}}
</div>
<div class="page_navigation">
<button type="button" onclick="navigate_backward()">
&#8592; previous
</button>
<button type="button" onclick="navigate_forward()">next &#8594;</button>
</div>
</main>
<script src="static/index.js"></script>
<script src="static/search_area_options.js"></script>
<script src="static/pagination.js"></script>
<script src="static/error_box.js"></script>
{{>footer}}


@ -1,36 +0,0 @@
<div class="search_area">
{{>bar this}}
<div class="error_box">
{{#if engineErrorsInfo}}
<button onclick="toggleErrorBox()" class="error_box_toggle_button">
<img src="./images/warning.svg" alt="Info icon for error box" />
</button>
<div class="dropdown_error_box">
{{#each engineErrorsInfo}}
<div class="error_item">
<span class="engine_name">{{{this.engine}}}</span>
<span class="engine_name">{{{this.error}}}</span>
<span class="severity_color" style="background: {{{this.severity_color}}};"></span>
</div>
{{/each}}
</div>
{{else}}
<button onclick="toggleErrorBox()" class="error_box_toggle_button">
<img src="./images/info.svg" alt="Warning icon for error box" />
</button>
<div class="dropdown_error_box">
<div class="no_errors">
Everything looks good 🙂!!
</div>
</div>
{{/if}}
</div>
</div>
<div class="search_options">
<select name="safe_search_levels" {{#if (gte safeSearchLevel 3)}} disabled {{/if}}>
<option value=0 {{#if (eq safeSearchLevel 0)}} selected {{/if}}>SafeSearch: None</option>
<option value=1 {{#if (eq safeSearchLevel 1)}} selected {{/if}}>SafeSearch: Low</option>
<option value=2 {{#if (eq safeSearchLevel 2)}} selected {{/if}}>SafeSearch: Moderate</option>
</select>
</div>
</div>


@ -1,22 +0,0 @@
{{>header this}}
<main class="settings" >
<h1>Settings</h1>
<hr />
<div class="settings_container">
<div class="sidebar">
<div class="btn active" onclick="setActiveTab(this)">general</div>
<div class="btn" onclick="setActiveTab(this)">user interface</div>
<div class="btn" onclick="setActiveTab(this)">engines</div>
<div class="btn" onclick="setActiveTab(this)">cookies</div>
</div>
<div class="main_container">
{{> general_tab}} {{> user_interface_tab}} {{> engines_tab}} {{>
cookies_tab}}
<p class="message"></p>
<button type="submit" onclick="setClientSettings()">Save</button>
</div>
</div>
</main>
<script src="static/settings.js"></script>
<script src="static/cookies.js"></script>
{{>footer}}


@ -1,28 +0,0 @@
<div class="user_interface tab">
<h1>User Interface</h1>
<h3>select theme</h3>
<p class="description">
Select the theme from the available themes to be used in user interface
</p>
<select name="themes">
<option value="simple">simple</option>
</select>
<h3>select color scheme</h3>
<p class="description">
Select the color scheme for your theme to be used in user interface
</p>
<select name="colorschemes">
<option value="catppuccin-mocha">catppuccin mocha</option>
<option value="dark-chocolate">dark chocolate</option>
<option value="dracula">dracula</option>
<option value="gruvbox-dark">gruvbox dark</option>
<option value="monokai">monokai</option>
<option value="nord">nord</option>
<option value="oceanic-next">oceanic next</option>
<option value="one-dark">one dark</option>
<option value="solarized-dark">solarized dark</option>
<option value="solarized-light">solarized light</option>
<option value="tokyo-night">tokyo night</option>
<option value="tomorrow-night">tomorrow night</option>
</select>
</div>


@ -1,9 +1,9 @@
//! This module provides the functionality to cache the aggregated results fetched and aggregated
//! from the upstream search engines in a json format.
use blake3::hash;
use error_stack::Report;
use futures::future::try_join_all;
use md5::compute;
use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};
use super::error::CacheError;
@ -59,7 +59,7 @@ impl RedisCache {
///
/// * `url` - It takes an url as string.
fn hash_url(&self, url: &str) -> String {
format!("{:?}", compute(url))
format!("{:?}", blake3::hash(url.as_bytes()))
}
/// A function which fetches the cached json results as json string from the redis server.
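
The hunk above swaps `md5::compute` for `blake3::hash` when deriving the Redis key for a cached result page. A minimal standalone sketch (not the project's code) of what that hashing step produces:

```rust
/// Reduce a URL to a fixed-length hex key, as the cacher above does for Redis.
fn cache_key(url: &str) -> String {
    // blake3::hash returns a 32-byte digest; to_hex() renders it as 64 lowercase hex chars.
    blake3::hash(url.as_bytes()).to_hex().to_string()
}

fn main() {
    println!("{}", cache_key("https://www.example.org/search?q=websurfx"));
}
```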


@ -3,7 +3,6 @@
use crate::handler::paths::{file_path, FileType};
use crate::models::engine_models::{EngineError, EngineHandler};
use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style};
use log::LevelFilter;
use mlua::Lua;
@ -29,7 +28,7 @@ pub struct Config {
/// It stores the option to whether enable or disable debug mode.
pub debug: bool,
/// It stores all the engine names that were enabled by the user.
pub upstream_search_engines: Vec<EngineHandler>,
pub upstream_search_engines: HashMap<String, bool>,
/// It stores the time (secs) which controls the server request timeout.
pub request_timeout: u8,
/// It stores the number of threads which controls the app will use to run.
@ -109,11 +108,7 @@ impl Config {
logging,
debug,
upstream_search_engines: globals
.get::<_, HashMap<String, bool>>("upstream_search_engines")?
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.map(|engine| EngineHandler::new(&engine))
.collect::<Result<Vec<EngineHandler>, error_stack::Report<EngineError>>>()?,
.get::<_, HashMap<String, bool>>("upstream_search_engines")?,
request_timeout: globals.get::<_, u8>("request_timeout")?,
threads,
rate_limiter: RateLimiter {
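
Since `upstream_search_engines` is now stored as a raw `HashMap<String, bool>`, deciding which engines are actually enabled moves to the call sites (the search route later in this diff does this with `filter_map` and `then_some`). A small hedged illustration of that pattern, using made-up toggle values:

```rust
use std::collections::HashMap;

/// Keep only the engine names whose toggle is set to true.
fn enabled_engines(upstream_search_engines: &HashMap<String, bool>) -> Vec<String> {
    upstream_search_engines
        .iter()
        .filter_map(|(name, enabled)| enabled.then(|| name.clone()))
        .collect()
}

fn main() {
    let config: HashMap<String, bool> =
        HashMap::from([("duckduckgo".into(), true), ("searx".into(), false)]);
    assert_eq!(enabled_engines(&config), vec!["duckduckgo".to_string()]);
}
```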


@ -4,7 +4,7 @@
use std::collections::HashMap;
use reqwest::header::HeaderMap;
use reqwest::{header::HeaderMap, Client};
use scraper::Html;
use crate::models::aggregation_models::SearchResult;
@ -42,7 +42,7 @@ impl SearchEngine for Brave {
query: &str,
page: u32,
user_agent: &str,
request_timeout: u8,
client: &Client,
safe_search: u8,
) -> Result<HashMap<String, SearchResult>, EngineError> {
let url = format!("https://search.brave.com/search?q={query}&offset={page}");
@ -68,7 +68,7 @@ impl SearchEngine for Brave {
.change_context(EngineError::UnexpectedError)?;
let document: Html = Html::parse_document(
&Brave::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
&Brave::fetch_html_from_upstream(self, &url, header_map, client).await?,
);
if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(0) {


@ -5,6 +5,7 @@
use std::collections::HashMap;
use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;
use crate::models::aggregation_models::SearchResult;
@ -44,7 +45,7 @@ impl SearchEngine for DuckDuckGo {
query: &str,
page: u32,
user_agent: &str,
request_timeout: u8,
client: &Client,
_safe_search: u8,
) -> Result<HashMap<String, SearchResult>, EngineError> {
// Page number can be missing or empty string and so appropriate handling is required
@ -76,7 +77,7 @@ impl SearchEngine for DuckDuckGo {
.change_context(EngineError::UnexpectedError)?;
let document: Html = Html::parse_document(
&DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
&DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, client).await?,
);
if self.parser.parse_for_no_results(&document).next().is_some() {


@ -3,6 +3,7 @@
//! number if provided.
use reqwest::header::HeaderMap;
use reqwest::Client;
use scraper::Html;
use std::collections::HashMap;
@ -40,7 +41,7 @@ impl SearchEngine for Searx {
query: &str,
page: u32,
user_agent: &str,
request_timeout: u8,
client: &Client,
mut safe_search: u8,
) -> Result<HashMap<String, SearchResult>, EngineError> {
// Page number can be missing or empty string and so appropriate handling is required
@ -68,7 +69,7 @@ impl SearchEngine for Searx {
.change_context(EngineError::UnexpectedError)?;
let document: Html = Html::parse_document(
&Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
&Searx::fetch_html_from_upstream(self, &url, header_map, client).await?,
);
if let Some(no_result_msg) = self.parser.parse_for_no_results(&document).nth(1) {


@ -12,6 +12,7 @@ pub mod handler;
pub mod models;
pub mod results;
pub mod server;
pub mod templates;
use std::net::TcpListener;
@ -23,7 +24,6 @@ use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{dev::Server, http::header, middleware::Logger, web, App, HttpServer};
use cache::cacher::{Cache, SharedCache};
use config::parser::Config;
use handlebars::Handlebars;
use handler::paths::{file_path, FileType};
/// Runs the web server on the provided TCP listener and returns a `Server` instance.
@ -48,16 +48,8 @@ use handler::paths::{file_path, FileType};
/// let server = run(listener,config,cache).expect("Failed to start server");
/// ```
pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Result<Server> {
let mut handlebars: Handlebars<'_> = Handlebars::new();
let public_folder_path: &str = file_path(FileType::Theme)?;
handlebars
.register_templates_directory(".html", format!("{}/templates", public_folder_path))
.unwrap();
let handlebars_ref: web::Data<Handlebars<'_>> = web::Data::new(handlebars);
let cloned_config_threads_opt: u8 = config.threads;
let cache = web::Data::new(SharedCache::new(cache));
@ -75,7 +67,6 @@ pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Resu
App::new()
.wrap(Logger::default()) // added logging middleware for logging.
.app_data(handlebars_ref.clone())
.app_data(web::Data::new(config.clone()))
.app_data(cache.clone())
.wrap(cors)


@ -1,11 +1,10 @@
//! This module provides public models for handling, storing and serializing of search results
//! data scraped from the upstream search engines.
use super::engine_models::EngineError;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use super::{engine_models::EngineError, parser_models::Style};
/// A named struct to store the raw scraped search results scraped search results from the
/// upstream search engines before aggregating it.It derives the Clone trait which is needed
/// to write idiomatic rust using `Iterators`.
@ -109,10 +108,6 @@ impl EngineErrorInfo {
pub struct SearchResults {
/// Stores the individual serializable `SearchResult` struct into a vector of
pub results: Vec<SearchResult>,
/// Stores the current pages search query `q` provided in the search url.
pub page_query: String,
/// Stores the theming options for the website.
pub style: Style,
/// Stores the information on which engines failed with their engine name
/// and the type of error that caused it.
pub engine_errors_info: Vec<EngineErrorInfo>,
@ -142,15 +137,9 @@ impl SearchResults {
/// the search url.
/// * `engine_errors_info` - Takes an array of structs which contains information regarding
/// which engines failed with their names, reason and their severity color name.
pub fn new(
results: Vec<SearchResult>,
page_query: &str,
engine_errors_info: &[EngineErrorInfo],
) -> Self {
pub fn new(results: Vec<SearchResult>, engine_errors_info: &[EngineErrorInfo]) -> Self {
Self {
results,
page_query: page_query.to_owned(),
style: Style::default(),
engine_errors_info: engine_errors_info.to_owned(),
disallowed: Default::default(),
filtered: Default::default(),
@ -159,21 +148,11 @@ impl SearchResults {
}
}
/// A setter function to add website style to the return search results.
pub fn add_style(&mut self, style: &Style) {
self.style = style.clone();
}
/// A setter function that sets disallowed to true.
pub fn set_disallowed(&mut self) {
self.disallowed = true;
}
/// A setter function to set the current page search query.
pub fn set_page_query(&mut self, page: &str) {
self.page_query = page.to_owned();
}
/// A setter function that sets the filtered to true.
pub fn set_filtered(&mut self) {
self.filtered = true;


@ -3,7 +3,8 @@
use super::aggregation_models::SearchResult;
use error_stack::{Report, Result, ResultExt};
use std::{collections::HashMap, fmt, time::Duration};
use reqwest::Client;
use std::{collections::HashMap, fmt};
/// A custom error type used for handle engine associated errors.
#[derive(Debug)]
@ -71,12 +72,11 @@ pub trait SearchEngine: Sync + Send {
&self,
url: &str,
header_map: reqwest::header::HeaderMap,
request_timeout: u8,
client: &Client,
) -> Result<String, EngineError> {
// fetch the html from upstream search engine
Ok(reqwest::Client::new()
Ok(client
.get(url)
.timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.headers(header_map) // add spoofed headers to emulate human behavior
.send()
.await
@ -109,7 +109,7 @@ pub trait SearchEngine: Sync + Send {
query: &str,
page: u32,
user_agent: &str,
request_timeout: u8,
client: &Client,
safe_search: u8,
) -> Result<HashMap<String, SearchResult>, EngineError>;
}


@ -1,8 +1,6 @@
//! This module provides public models for handling, storing and serializing parsed config file
//! options from config.lua by grouping them together.
use serde::{Deserialize, Serialize};
/// A named struct which stores,deserializes, serializes and groups the parsed config file options
/// of theme and colorscheme names into the Style struct which derives the `Clone`, `Serialize`
/// and Deserialize traits where the `Clone` trait is derived for allowing the struct to be
@ -12,7 +10,7 @@ use serde::{Deserialize, Serialize};
/// order to allow the deserializing the json back to struct in aggregate function in
/// aggregator.rs and create a new struct out of it and then serialize it back to json and pass
/// it to the template files.
#[derive(Serialize, Deserialize, Clone, Default)]
#[derive(Clone, Default)]
pub struct Style {
/// It stores the parsed theme option used to set a theme for the website.
pub theme: String,


@ -8,8 +8,9 @@ use crate::models::{
engine_models::{EngineError, EngineHandler},
};
use error_stack::Report;
use rand::Rng;
use regex::Regex;
use reqwest::{Client, ClientBuilder};
use std::time::{SystemTime, UNIX_EPOCH};
use std::{
collections::HashMap,
io::{BufReader, Read},
@ -18,6 +19,9 @@ use std::{
use std::{fs::File, io::BufRead};
use tokio::task::JoinHandle;
/// A constant for holding the prebuilt Client globally in the app.
static CLIENT: std::sync::OnceLock<Client> = std::sync::OnceLock::new();
/// Aliases for long type annotations
type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<EngineError>>>>;
@ -68,13 +72,23 @@ pub async fn aggregate(
request_timeout: u8,
safe_search: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let client = CLIENT.get_or_init(|| {
ClientBuilder::new()
.timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
.https_only(true)
.gzip(true)
.brotli(true)
.build()
.unwrap()
});
let user_agent: &str = random_user_agent();
// Add a random delay before making the request.
if random_delay || !debug {
let mut rng = rand::thread_rng();
let delay_secs = rng.gen_range(1..10);
tokio::time::sleep(Duration::from_secs(delay_secs)).await;
let nanos = SystemTime::now().duration_since(UNIX_EPOCH)?.subsec_nanos() as f32;
let delay = ((nanos / 1_0000_0000 as f32).floor() as u64) + 1;
tokio::time::sleep(Duration::from_secs(delay)).await;
}
let mut names: Vec<&str> = Vec::with_capacity(0);
@ -88,7 +102,7 @@ pub async fn aggregate(
let query: String = query.to_owned();
tasks.push(tokio::spawn(async move {
search_engine
.results(&query, page, user_agent, request_timeout, safe_search)
.results(&query, page, user_agent, client, safe_search)
.await
}));
}
@ -166,7 +180,7 @@ pub async fn aggregate(
let results: Vec<SearchResult> = result_map.into_values().collect();
Ok(SearchResults::new(results, query, &engine_errors_info))
Ok(SearchResults::new(results, &engine_errors_info))
}
/// Filters a map of search results using a list of regex patterns.
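
The change above builds one `reqwest::Client` (with the configured timeout) inside a `std::sync::OnceLock` and shares it across every engine task, instead of constructing a new client per request. A stripped-down, hedged sketch of that pattern — the builder options here are only a subset of the ones in the diff:

```rust
use std::sync::OnceLock;
use std::time::Duration;

use reqwest::{Client, ClientBuilder};

/// Built on first use, then reused by every later call (and its connection pool with it).
static CLIENT: OnceLock<Client> = OnceLock::new();

fn http_client(request_timeout: u8) -> &'static Client {
    CLIENT.get_or_init(|| {
        ClientBuilder::new()
            .timeout(Duration::from_secs(request_timeout as u64))
            .https_only(true)
            .build()
            .expect("failed to build the shared reqwest client")
    })
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let status = http_client(30).get("https://example.org").send().await?.status();
    println!("{status}");
    Ok(())
}
```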


@ -7,30 +7,30 @@ use crate::{
handler::paths::{file_path, FileType},
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use std::fs::read_to_string;
/// Handles the route of index page or main page of the `websurfx` meta search engine website.
#[get("/")]
pub async fn index(
hbs: web::Data<Handlebars<'_>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let page_content: String = hbs.render("index", &config.style).unwrap();
Ok(HttpResponse::Ok().body(page_content))
pub async fn index(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> {
Ok(HttpResponse::Ok().body(
crate::templates::views::index::index(&config.style.colorscheme, &config.style.theme).0,
))
}
/// Handles the route of any other accessed route/page which is not provided by the
/// website essentially the 404 error page.
pub async fn not_found(
hbs: web::Data<Handlebars<'_>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let page_content: String = hbs.render("404", &config.style)?;
Ok(HttpResponse::Ok()
.content_type("text/html; charset=utf-8")
.body(page_content))
.body(
crate::templates::views::not_found::not_found(
&config.style.colorscheme,
&config.style.theme,
)
.0,
))
}
/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
@ -45,20 +45,26 @@ pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std:
/// Handles the route of about page of the `websurfx` meta search engine website.
#[get("/about")]
pub async fn about(
hbs: web::Data<Handlebars<'_>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let page_content: String = hbs.render("about", &config.style)?;
Ok(HttpResponse::Ok().body(page_content))
pub async fn about(config: web::Data<Config>) -> Result<HttpResponse, Box<dyn std::error::Error>> {
Ok(HttpResponse::Ok().body(
crate::templates::views::about::about(&config.style.colorscheme, &config.style.theme).0,
))
}
/// Handles the route of settings page of the `websurfx` meta search engine website.
#[get("/settings")]
pub async fn settings(
hbs: web::Data<Handlebars<'_>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let page_content: String = hbs.render("settings", &config.style)?;
Ok(HttpResponse::Ok().body(page_content))
Ok(HttpResponse::Ok().body(
crate::templates::views::settings::settings(
&config.style.colorscheme,
&config.style.theme,
&config
.upstream_search_engines
.keys()
.collect::<Vec<&String>>(),
)?
.0,
))
}
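
With Handlebars removed, every handler above now renders its page through maud and passes the markup's inner `String` to the response body (`Markup` is `PreEscaped<String>`, hence the trailing `.0`). A minimal hedged sketch of that handler shape, using a made-up `/hello` route rather than any real websurfx endpoint:

```rust
use actix_web::{get, App, HttpResponse, HttpServer};
use maud::{html, Markup};

/// A tiny maud view; the real views in this diff live under `templates::views`.
fn hello_page(name: &str) -> Markup {
    html! {
        h1 { "Hello, " (name) "!" }
    }
}

/// Hypothetical handler showing the `Markup` -> `.0` -> body pattern used above.
#[get("/hello")]
async fn hello() -> HttpResponse {
    HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(hello_page("websurfx").0)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(hello))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```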


@ -6,13 +6,12 @@ use crate::{
handler::paths::{file_path, FileType},
models::{
aggregation_models::SearchResults,
engine_models::EngineHandler,
engine_models::{EngineError, EngineHandler},
server_models::{Cookie, SearchParams},
},
results::aggregator::aggregate,
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use regex::Regex;
use std::{
fs::File,
@ -20,19 +19,6 @@ use std::{
};
use tokio::join;
/// Handles the route of any other accessed route/page which is not provided by the
/// website essentially the 404 error page.
pub async fn not_found(
hbs: web::Data<Handlebars<'_>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
let page_content: String = hbs.render("404", &config.style)?;
Ok(HttpResponse::Ok()
.content_type("text/html; charset=utf-8")
.body(page_content))
}
/// Handles the route of search page of the `websurfx` meta search engine website and it takes
/// two search url parameters `q` and `page` where `page` parameter is optional.
///
@ -49,7 +35,6 @@ pub async fn not_found(
/// ```
#[get("/search")]
pub async fn search(
hbs: web::Data<Handlebars<'_>>,
req: HttpRequest,
config: web::Data<Config>,
cache: web::Data<SharedCache>,
@ -58,7 +43,7 @@ pub async fn search(
match &params.q {
Some(query) => {
if query.trim().is_empty() {
return Ok(HttpResponse::Found()
return Ok(HttpResponse::TemporaryRedirect()
.insert_header(("location", "/"))
.finish());
}
@ -112,10 +97,17 @@ pub async fn search(
)
);
let page_content: String = hbs.render("search", &results?)?;
Ok(HttpResponse::Ok().body(page_content))
Ok(HttpResponse::Ok().body(
crate::templates::views::search::search(
&config.style.colorscheme,
&config.style.theme,
query,
&results?,
)
.0,
))
}
None => Ok(HttpResponse::Found()
None => Ok(HttpResponse::TemporaryRedirect()
.insert_header(("location", "/"))
.finish()),
}
@ -171,8 +163,6 @@ async fn results(
if _flag {
results.set_disallowed();
results.add_style(&config.style);
results.set_page_query(query);
cache.cache_results(&results, &url).await?;
results.set_safe_search_level(safe_search_level);
return Ok(results);
@ -221,23 +211,27 @@ async fn results(
true => {
let mut search_results = SearchResults::default();
search_results.set_no_engines_selected();
search_results.set_page_query(query);
search_results
}
}
}
None => {
aggregate(
query,
page,
config.aggregator.random_delay,
config.debug,
&config.upstream_search_engines,
config.request_timeout,
safe_search_level,
)
.await?
}
None => aggregate(
query,
page,
config.aggregator.random_delay,
config.debug,
&config
.upstream_search_engines
.clone()
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.map(|engine| EngineHandler::new(&engine))
.collect::<Result<Vec<EngineHandler>, error_stack::Report<EngineError>>>(
)?,
config.request_timeout,
safe_search_level,
)
.await?,
};
if results.engine_errors_info().is_empty()
&& results.results().is_empty()
@ -245,7 +239,6 @@ async fn results(
{
results.set_filtered();
}
results.add_style(&config.style);
cache
.cache_results(&results, &(format!("{url}{safe_search_level}")))
.await?;

src/templates/mod.rs Normal file

@ -0,0 +1,5 @@
//! This module provides other modules to handle both the view and its partials for the `websurfx`
//! search engine frontend.
mod partials;
pub mod views;


@ -0,0 +1,21 @@
//! A module that handles `bar` partial for the `search_bar` partial and the home/index/main page in the `websurfx` frontend.
use maud::{html, Markup, PreEscaped};
/// A function that handles the html code for the bar for the `search_bar` partial and the
/// home/index/main page in the search engine frontend.
///
/// # Arguments
///
/// * `query` - It takes the current search query provided by user as an argument.
///
/// # Returns
///
/// It returns the compiled html code for the search bar as a result.
pub fn bar(query: &str) -> Markup {
html!(
(PreEscaped("<div class=\"search_bar\">"))
input type="search" name="search-box" value=(query) placeholder="Type to search";
button type="submit" onclick="searchWeb()"{"search"}
)
}


@ -0,0 +1,29 @@
//! A module that handles the footer for all the pages in the `websurfx` frontend.
use maud::{html, Markup, PreEscaped};
/// A function that handles the html code for the footer for all the pages in the search engine
/// frontend.
///
/// # Returns
///
/// It returns the compiled html code for the footer as a result.
pub fn footer() -> Markup {
html!(
footer{
div{
span{"Powered By "b{"Websurfx"}}span{"-"}span{"a lightning fast, privacy respecting, secure meta
search engine"}
}
div{
ul{
li{a href="https://github.com/neon-mmd/websurfx"{"Source Code"}}
li{a href="https://github.com/neon-mmd/websurfx/issues"{"Issues/Bugs"}}
}
}
}
script src="static/settings.js"{}
(PreEscaped("</body>"))
(PreEscaped("</html>"))
)
}


@ -0,0 +1,35 @@
//! A module that handles the header for all the pages in the `websurfx` frontend.
use crate::templates::partials::navbar::navbar;
use maud::{html, Markup, PreEscaped, DOCTYPE};
/// A function that handles the html code for the header for all the pages in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code for the header as a result.
pub fn header(colorscheme: &str, theme: &str) -> Markup {
html!(
(DOCTYPE)
html lang="en"
head{
title{"Websurfx"}
meta charset="UTF-8";
meta name="viewport" content="width=device-width, initial-scale=1";
link href=(format!("static/colorschemes/{colorscheme}.css")) rel="stylesheet" type="text/css";
link href=(format!("static/themes/{theme}.css")) rel="stylesheet" type="text/css";
}
(PreEscaped("<body onload=\"getClientSettings()\">"))
header{
h1{a href="/"{"Websurfx"}}
(navbar())
}
)
}


@ -0,0 +1,8 @@
//! This module provides other modules to handle the partials for the views in the `websurfx` frontend.
pub mod bar;
pub mod footer;
pub mod header;
pub mod navbar;
pub mod search_bar;
pub mod settings_tabs;


@ -0,0 +1,19 @@
//! A module that handles `navbar` partial for the header partial in the `websurfx` frontend.
use maud::{html, Markup};
/// A function that handles the html code for the navbar partial used inside the header.
///
/// # Returns
///
/// It returns the compiled html code for the navbar as a result.
pub fn navbar() -> Markup {
html!(
nav{
ul{
li{a href="about"{"about"}}
li{a href="settings"{"settings"}}
}
}
)
}


@ -0,0 +1,76 @@
//! A module that handles `search bar` partial for the search page in the `websurfx` frontend.
use maud::{html, Markup, PreEscaped};
use crate::{models::aggregation_models::EngineErrorInfo, templates::partials::bar::bar};
/// A constant holding the named safe search level options for the corresponding values 0, 1 and 2.
const SAFE_SEARCH_LEVELS_NAME: [&str; 3] = ["None", "Low", "Moderate"];
/// A function that handles the html code for the search bar for the search page.
///
/// # Arguments
///
/// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream
/// search engine which failed to provide results as an argument.
/// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument.
/// * `query` - It takes the current search query provided by user as an argument.
///
/// # Returns
///
/// It returns the compiled html code for the search bar as a result.
pub fn search_bar(
engine_errors_info: &[EngineErrorInfo],
safe_search_level: u8,
query: &str,
) -> Markup {
html!(
.search_area{
(bar(query))
.error_box {
@if !engine_errors_info.is_empty(){
button onclick="toggleErrorBox()" class="error_box_toggle_button"{
img src="./images/warning.svg" alt="Info icon for error box";
}
.dropdown_error_box{
@for errors in engine_errors_info{
.error_item{
span class="engine_name"{(errors.engine)}
span class="engine_name"{(errors.error)}
span class="severity_color" style=(format!("background: {};", errors.severity_color)){}
}
}
}
}
@else {
button onclick="toggleErrorBox()" class="error_box_toggle_button"{
img src="./images/info.svg" alt="Warning icon for error box";
}
.dropdown_error_box {
.no_errors{
"Everything looks good 🙂!!"
}
}
}
}
(PreEscaped("</div>"))
.search_options {
@if safe_search_level >= 3 {
(PreEscaped("<select name=\"safe_search_levels\" disabled>"))
}
@else{
(PreEscaped("<select name=\"safe_search_levels\">"))
}
@for (idx, name) in SAFE_SEARCH_LEVELS_NAME.iter().enumerate() {
@if (safe_search_level as usize) == idx {
option value=(idx) selected {(format!("SafeSearch: {name}"))}
}
@else{
option value=(idx) {(format!("SafeSearch: {name}"))}
}
}
(PreEscaped("</select>"))
}
}
)
}


@ -0,0 +1,25 @@
//! A module that handles the cookies tab for setting page view in the `websurfx` frontend.
use maud::{html, Markup};
/// A function that handles the html code for the cookies tab of the settings page.
///
/// # Returns
///
/// It returns the compiled html markup code for the cookies tab.
pub fn cookies() -> Markup {
html!(
div class="cookies tab"{
h1{"Cookies"}
p class="description"{
"These are the cookies that are saved on your system and they contain the preferences
you chose in the settings page"
}
input type="text" name="cookie_field" value="" readonly;
p class="description"{
"The cookies stored are not used by us for any malicious intent or for
tracking you in any way."
}
}
)
}


@ -0,0 +1,43 @@
//! A module that handles the engines tab for setting page view in the `websurfx` frontend.
use maud::{html, Markup};
/// A function that handles the html code for the engines tab of the settings page.
///
/// # Arguments
///
/// * `engine_names` - It takes the list of all available engine names as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code for the engines tab.
pub fn engines(engine_names: &[&String]) -> Markup {
html!(
div class="engines tab"{
h1{"Engines"}
h3{"select search engines"}
p class="description"{
"Select the search engines from the list of engines that you want results from"
}
.engine_selection{
.toggle_btn{
label class="switch"{
input type="checkbox" class="select_all" onchange="toggleAllSelection()";
span class="slider round"{}
}
"Select All"
}
hr;
@for engine_name in engine_names{
.toggle_btn{
label class="switch"{
input type="checkbox" class="engine";
span class="slider round"{}
}
(format!("{}{}",engine_name[..1].to_uppercase().to_owned(), engine_name[1..].to_owned()))
}
}
}
}
)
}


@ -0,0 +1,28 @@
//! A module that handles the general tab for setting page view in the `websurfx` frontend.
use maud::{html, Markup};
/// A constant holding the named safe search level options for the corresponding values 0, 1 and 2.
const SAFE_SEARCH_LEVELS: [(u8, &str); 3] = [(0, "None"), (1, "Low"), (2, "Moderate")];
/// A function that handles the html code for the general tab of the settings page.
///
/// # Returns
///
/// It returns the compiled html markup code for the general tab.
pub fn general() -> Markup {
html!(
div class="general tab active"{
h1{"General"}
h3{"Select a safe search level"}
p class="description"{
"Select a safe search level from the menu below to filter content based on the level."
}
select name="safe_search_levels"{
@for (k,v) in SAFE_SEARCH_LEVELS{
option value=(k){(v)}
}
}
}
)
}


@ -0,0 +1,7 @@
//! This module provides other modules to handle the partials for the tabs for the settings page
//! view in the `websurfx` frontend.
pub mod cookies;
pub mod engines;
pub mod general;
pub mod user_interface;


@ -0,0 +1,65 @@
//! A module that handles the user interface tab for setting page view in the `websurfx` frontend.
use crate::handler::paths::{file_path, FileType};
use maud::{html, Markup};
use std::fs::read_dir;
/// A helper function that helps in building the list of all available colorscheme/theme names
/// present in the colorschemes and themes folder respectively.
///
/// # Arguments
///
/// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
/// argument.
///
/// # Error
///
/// Returns a list of colorscheme/theme names as a vector of tuple strings on success otherwise
/// returns a standard error message.
fn style_option_list(
style_type: &str,
) -> Result<Vec<(String, String)>, Box<dyn std::error::Error + '_>> {
let mut style_option_names: Vec<(String, String)> = Vec::new();
for file in read_dir(format!(
"{}static/{}/",
file_path(FileType::Theme)?,
style_type,
))? {
let style_name = file?.file_name().to_str().unwrap().replace(".css", "");
style_option_names.push((style_name.clone(), style_name.replace('-', " ")));
}
Ok(style_option_names)
}
/// A function that handles the html code for the user interface tab of the settings page.
///
/// # Error
///
/// It returns the compiled html markup code for the user interface tab on success otherwise
/// returns a standard error message.
pub fn user_interface() -> Result<Markup, Box<dyn std::error::Error>> {
Ok(html!(
div class="user_interface tab"{
h1{"User Interface"}
h3{"select theme"}
p class="description"{
"Select the theme from the available themes to be used in user interface"
}
select name="themes"{
@for (k,v) in style_option_list("themes")?{
option value=(k){(v)}
}
}
h3{"select color scheme"}
p class="description"{
"Select the color scheme for your theme to be used in user interface"
}
select name="colorschemes"{
@for (k,v) in style_option_list("colorschemes")?{
option value=(k){(v)}
}
}
}
))
}


@ -0,0 +1,48 @@
//! A module that handles the view for the about page in the `websurfx` frontend.
use maud::{html, Markup};
use crate::templates::partials::{footer::footer, header::header};
/// A function that handles the html code for the about page view in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code as a result.
pub fn about(colorscheme: &str, theme: &str) -> Markup {
html!(
(header(colorscheme, theme))
main class="about-container"{
article {
div{
h1{"Websurfx"}
hr size="4" width="100%" color="#a6e3a1"{}
}
p{"A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy."br{}" It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads."
}
h2{"Some of the Top Features:"}
ul{strong{"Lightning fast "}"- Results load within milliseconds for an instant search experience."}
ul{strong{"Secure search"}" - All searches are performed over an encrypted connection to prevent snooping."}
ul{strong{"Ad free results"}" - All search results are ad free and clutter free for a clean search experience."}
ul{strong{"Privacy focused"}" - Websurfx does not track, store or sell your search data. Your privacy is our priority."}
ul{strong{"Free and Open source"}" - The entire project's code is open source and available for free on "{a href="https://github.com/neon-mmd/websurfx"{"GitHub"}}" under the GNU Affero General Public License."}
ul{strong{"Highly customizable"}" - Websurfx comes with 9 built-in color themes and supports creating custom themes effortlessly."}
}
h3{"Developed by: "{a href="https://github.com/neon-mmd/websurfx"{"Websurfx team"}}}
}
(footer())
)
}


@ -0,0 +1,28 @@
//! A module that handles the view for the index/home/main page in the `websurfx` frontend.
use maud::{html, Markup, PreEscaped};
use crate::templates::partials::{bar::bar, footer::footer, header::header};
/// A function that handles the html code for the index/home/main page view in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code as a result.
pub fn index(colorscheme: &str, theme: &str) -> Markup {
html!(
(header(colorscheme, theme))
main class="search-container"{
img src="../images/websurfx_logo.png" alt="Websurfx meta-search engine logo";
(bar(&String::default()))
(PreEscaped("</div>"))
}
script src="static/index.js"{}
(footer())
)
}


@ -0,0 +1,8 @@
//! This module provides other modules to handle view for each individual page in the
//! `websurfx` frontend.
pub mod about;
pub mod index;
pub mod not_found;
pub mod search;
pub mod settings;


@ -0,0 +1,29 @@
//! A module that handles the view for the 404 page in the `websurfx` frontend.
use crate::templates::partials::{footer::footer, header::header};
use maud::{html, Markup};
/// A function that handles the html code for the 404 page view in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code as a result.
pub fn not_found(colorscheme: &str, theme: &str) -> Markup {
html!(
(header(colorscheme, theme))
main class="error_container"{
img src="images/robot-404.svg" alt="Image of broken robot.";
.error_content{
h1{"Aw! snap"}
h2{"404 Page Not Found!"}
p{"Go to "{a href="/"{"search page"}}}
}
}
(footer())
)
}


@ -0,0 +1,122 @@
//! A module that handles the view for the search page in the `websurfx` frontend.
use maud::{html, Markup, PreEscaped};
use crate::{
models::aggregation_models::SearchResults,
templates::partials::{footer::footer, header::header, search_bar::search_bar},
};
/// A function that handles the html code for the search page view in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
/// * `query` - It takes the current search query provided by the user as an argument.
/// * `search_results` - It takes the aggregated search results as an argument.
///
/// # Returns
///
/// It returns the compiled html markup code as a result.
pub fn search(
colorscheme: &str,
theme: &str,
query: &str,
search_results: &SearchResults,
) -> Markup {
html!(
(header(colorscheme, theme))
main class="results"{
(search_bar(&search_results.engine_errors_info, search_results.safe_search_level, query))
.results_aggregated{
@if !search_results.results.is_empty() {
@for result in search_results.results.iter(){
.result {
h1{a href=(result.url){(PreEscaped(&result.title))}}
small{(result.url)}
p{(PreEscaped(&result.description))}
.upstream_engines{
@for name in result.clone().engine{
span{(name)}
}
}
}
}
}
@else if search_results.disallowed{
.result_disallowed{
.description{
p{
"Your search - "{span class="user_query"{(query)}}" -
has been disallowed."
}
p class="description_paragraph"{"Dear user,"}
p class="description_paragraph"{
"The query - "{span class="user_query"{(query)}}" - has
been blacklisted via server configuration and hence disallowed by the
server. Henceforth no results could be displayed for your query."
}
}
img src="./images/barricade.png" alt="Image of a Barricade";
}
}
@else if search_results.filtered {
.result_filtered{
.description{
p{
"Your search - "{span class="user_query"{(query)}}" -
has been filtered."
}
p class="description_paragraph"{"Dear user,"}
p class="description_paragraph"{
"All the search results contain results that have been configured to be
filtered out via the server configuration and have therefore been
completely filtered out."
}
}
img src="./images/filter.png" alt="Image of a paper inside a funnel";
}
}
@else if search_results.no_engines_selected {
.result_engine_not_selected{
.description{
p{
"No results could be fetched for your search '"{span class="user_query"{(query)}}"'."
}
p class="description_paragraph"{"Dear user,"}
p class="description_paragraph"{
"No results could be retrieved from the upstream search engines as no
upstream search engines were selected from the settings page."
}
}
img src="./images/no_selection.png" alt="Image of a white cross inside a red circle";
}
}
@else{
.result_not_found {
p{"Your search - "{(query)}" - did not match any documents."}
p class="suggestions"{"Suggestions:"}
ul{
li{"Make sure that all words are spelled correctly."}
li{"Try different keywords."}
li{"Try more general keywords."}
}
img src="./images/no_results.gif" alt="Man fishing gif";
}
}
}
.page_navigation {
button type="button" onclick="navigate_backward()"{
(PreEscaped("&#8592;")) "previous"
}
button type="button" onclick="navigate_forward()"{"next" (PreEscaped("&#8594;"))}
}
}
script src="static/index.js"{}
script src="static/search_area_options.js"{}
script src="static/pagination.js"{}
script src="static/error_box.js"{}
(footer())
)
}


@ -0,0 +1,56 @@
//! A module that handles the view for the settings page in the `websurfx` frontend.
use maud::{html, Markup};
use crate::templates::partials::{
footer::footer,
header::header,
settings_tabs::{
cookies::cookies, engines::engines, general::general, user_interface::user_interface,
},
};
/// A function that handles the html code for the settings page view in the search engine frontend.
///
/// # Arguments
///
/// * `colorscheme` - It takes the colorscheme name as an argument.
/// * `theme` - It takes the theme name as an argument.
/// * `engine_names` - It takes a list of engine names as an argument.
///
/// # Error
///
/// This function returns a compiled html markup code on success otherwise returns a standard error
/// message.
pub fn settings(
colorscheme: &str,
theme: &str,
engine_names: &[&String],
) -> Result<Markup, Box<dyn std::error::Error>> {
Ok(html!(
(header(colorscheme, theme))
main class="settings"{
h1{"Settings"}
hr;
.settings_container{
.sidebar{
div class="btn active" onclick="setActiveTab(this)"{"general"}
.btn onclick="setActiveTab(this)"{"user interface"}
.btn onclick="setActiveTab(this)"{"engines"}
.btn onclick="setActiveTab(this)"{"cookies"}
}
.main_container{
(general())
(user_interface()?)
(engines(engine_names))
(cookies())
p class="message"{}
button type="submit" onclick="setClientSettings()"{"Save"}
}
}
}
script src="static/settings.js"{}
script src="static/cookies.js"{}
(footer())
))
}


@ -1,7 +1,6 @@
use std::net::TcpListener;
use handlebars::Handlebars;
use websurfx::{config::parser::Config, run};
use websurfx::{config::parser::Config, run, templates::views};
// Starts a new instance of the HTTP server, bound to a random available port
fn spawn_app() -> String {
@ -21,18 +20,6 @@ fn spawn_app() -> String {
format!("http://127.0.0.1:{}/", port)
}
// Creates a new instance of Handlebars and registers the templates directory.
// This is used to compare the rendered template with the response body.
fn handlebars() -> Handlebars<'static> {
let mut handlebars = Handlebars::new();
handlebars
.register_templates_directory(".html", "./public/templates")
.unwrap();
handlebars
}
#[tokio::test]
async fn test_index() {
let address = spawn_app();
@ -41,9 +28,8 @@ async fn test_index() {
let res = client.get(address).send().await.unwrap();
assert_eq!(res.status(), 200);
let handlebars = handlebars();
let config = Config::parse(true).unwrap();
let template = handlebars.render("index", &config.style).unwrap();
let template = views::index::index(&config.style.colorscheme, &config.style.theme).0;
assert_eq!(res.text().await.unwrap(), template);
}