
Merge pull request #284 from neon-mmd/docs-revision

📝 Revise the `docs` to remain in sync with the current changes
commit 2824d9e4d1
alamin655 2023-10-01 08:43:51 +05:30, committed by GitHub
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 313 additions and 134 deletions

Cargo.lock (generated, 76 lines changed)

@@ -290,9 +290,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
 [[package]]
 name = "anstyle"
-version = "1.0.3"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b84bf0a05bbb2a83e5eb6fa36bb6e87baa08193c35ff52bbf6b38d8af2890e46"
+checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
 [[package]]
 name = "anyhow"
@@ -412,9 +412,9 @@ dependencies = [
 [[package]]
 name = "brotli"
-version = "3.3.4"
+version = "3.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68"
+checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f"
 dependencies = [
 "alloc-no-stdlib",
 "alloc-stdlib",
@@ -423,9 +423,9 @@ dependencies = [
 [[package]]
 name = "brotli-decompressor"
-version = "2.3.4"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744"
+checksum = "da74e2b81409b1b743f8f0c62cc6254afefb8b8e50bbfe3735550f7aeefa3448"
 dependencies = [
 "alloc-no-stdlib",
 "alloc-stdlib",
@@ -581,18 +581,18 @@ dependencies = [
 [[package]]
 name = "clap"
-version = "4.4.4"
+version = "4.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1d7b8d5ec32af0fadc644bf1fd509a688c2103b185644bb1e29d164e0703136"
+checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956"
 dependencies = [
 "clap_builder",
 ]
 [[package]]
 name = "clap_builder"
-version = "4.4.4"
+version = "4.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5179bb514e4d7c2051749d8fcefa2ed6d06a9f4e6d69faf3805f5d80b8cf8d56"
+checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45"
 dependencies = [
 "anstyle",
 "clap_lex",
@@ -873,7 +873,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
 dependencies = [
 "cfg-if 1.0.0",
-"hashbrown 0.14.0",
+"hashbrown 0.14.1",
 "lock_api 0.4.10",
 "once_cell",
 "parking_lot_core 0.9.8",
@@ -1064,9 +1064,9 @@ dependencies = [
 [[package]]
 name = "fastrand"
-version = "2.0.0"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
+checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
 [[package]]
 name = "flate2"
@@ -1392,9 +1392,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 [[package]]
 name = "hashbrown"
-version = "0.14.0"
+version = "0.14.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12"
 [[package]]
 name = "hermit-abi"
@@ -1735,9 +1735,9 @@ dependencies = [
 [[package]]
 name = "linux-raw-sys"
-version = "0.4.7"
+version = "0.4.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a9bad9f94746442c783ca431b22403b519cd7fbeed0533fdd6328b2f2212128"
+checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db"
 [[package]]
 name = "local-channel"
@@ -2212,9 +2212,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
 [[package]]
 name = "pest"
-version = "2.7.3"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7a4d085fd991ac8d5b05a147b437791b4260b76326baf0fc60cf7c9c27ecd33"
+checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4"
 dependencies = [
 "memchr",
 "thiserror",
@@ -2223,9 +2223,9 @@ dependencies = [
 [[package]]
 name = "pest_derive"
-version = "2.7.3"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2bee7be22ce7918f641a33f08e3f43388c7656772244e2bbb2477f44cc9021a"
+checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8"
 dependencies = [
 "pest",
 "pest_generator",
@@ -2233,9 +2233,9 @@ dependencies = [
 [[package]]
 name = "pest_generator"
-version = "2.7.3"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1511785c5e98d79a05e8a6bc34b4ac2168a0e3e92161862030ad84daa223141"
+checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a"
 dependencies = [
 "pest",
 "pest_meta",
@@ -2246,9 +2246,9 @@ dependencies = [
 [[package]]
 name = "pest_meta"
-version = "2.7.3"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b42f0394d3123e33353ca5e1e89092e533d2cc490389f2bd6131c43c634ebc5f"
+checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d"
 dependencies = [
 "once_cell",
 "pest",
@@ -2739,9 +2739,9 @@ dependencies = [
 [[package]]
 name = "regex"
-version = "1.9.5"
+version = "1.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47"
+checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
 dependencies = [
 "aho-corasick",
 "memchr",
@@ -2751,9 +2751,9 @@ dependencies = [
 [[package]]
 name = "regex-automata"
-version = "0.3.8"
+version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795"
+checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
 dependencies = [
 "aho-corasick",
 "memchr",
@@ -2869,9 +2869,9 @@ dependencies = [
 [[package]]
 name = "rustix"
-version = "0.38.14"
+version = "0.38.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "747c788e9ce8e92b12cd485c49ddf90723550b654b32508f979b71a7b1ecda4f"
+checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531"
 dependencies = [
 "bitflags 2.4.0",
 "errno",
@@ -3098,9 +3098,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
 [[package]]
 name = "sha2"
-version = "0.10.7"
+version = "0.10.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8"
+checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
 dependencies = [
 "cfg-if 1.0.0",
 "cpufeatures",
@@ -3361,18 +3361,18 @@ dependencies = [
 [[package]]
 name = "thiserror"
-version = "1.0.48"
+version = "1.0.49"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7"
+checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4"
 dependencies = [
 "thiserror-impl",
 ]
 [[package]]
 name = "thiserror-impl"
-version = "1.0.48"
+version = "1.0.49"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
+checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
 dependencies = [
 "proc-macro2 1.0.67",
 "quote 1.0.33",
@@ -3932,7 +3932,7 @@ dependencies = [
 [[package]]
 name = "websurfx"
-version = "0.23.5"
+version = "0.24.3"
 dependencies = [
 "actix-cors",
 "actix-files",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "0.23.6"
+version = "0.24.3"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"


@@ -9,7 +9,7 @@
 - [Instances](./instances.md)
 - [Installation](./installation.md)
-- [Building](./building.md)
+- [Features](./features.md)
 - [Configuration](./configuration.md)
 - [Theming](./theming.md)

docs/building.md (deleted)

@ -1,59 +0,0 @@
# Build Options
The project provides 4 caching options as conditionally compiled features. This helps reduce the size of the compiled app by only including the code that is necessary for a particular caching option.
The different caching features provided are as follows:
- No cache
- Redis cache
- In memory cache
- Hybrid cache
## No Cache
This feature disables caching for the search engine. This option can drastically reduce binary size but with the cost that subsequent search requests and previous & next page search results are not cached which can make navigating between pages slower. As well as page refreshes of the same page also becomes slower as each refresh has to fetch the results from the upstream search engines.
To build the app with this option run the following command:
``` shell
cargo build -r --no-default-features
```
Once you have build the app with this option follow the commands listed on the [**Installation**](./installation.md#install-from-source) page of the docs to run the app.
## Redis Cache
This feature enables `Redis` caching ability for the search engine. This option allows the search engine to cache the results on the redis server. This feature can be useful for having a dedicated cache server for multiple devices hosted with the `Websurfx` server which can use the one dedicated cache server for hosting their cache on it. But a disadvantage of this solution is that if the `Redis`server is located far away (for example provided by a vps as service) and if it is unavailable or down for some reason then the `Websurfx` server would not be able to function properly or will crash on startup.
To build the app with this option run the following command:
``` shell
cargo build -r --no-default-features --features redis-cache
```
Once you have build the app with this option follow the commands listed on the [**Installation**](./installation.md#install-from-source) page of the docs to run the app.
## In Memory Cache
This feature enables `In Memory` caching soluion within the search engine and it is the default feature provided by the project. This option allows the search engine to cache the results in the memory which can help increase the speed of the fetched cache results and it also has an advantage that it is extremely reliable as all the results are stored in memory within the search engine. Though the disadvantage of this solution are that caching of results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system and as such is not ideal for very low memory devices and is highly unscalable.
To build the app with this option run the following command:
``` shell
cargo build -r
```
Once you have build the app with this option follow the commands listed on the [**Installation**](./installation.md#install-from-source) page of the docs to run the app.
## Hybrid Cache
This feature enables the `Hybrid` caching solution for the search engine which provides the advantages of both `In Memory` caching and `Redis` caching and it is an ideal solution if you need a very resiliant and reliable solution for the `Websurfx` which can provide both speed and reliability. Like for example if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting `Websurfx` instance which will be used by hundreds or thousands of users over the world.
To build the app with this option run the following command:
``` shell
cargo build -r --features redis-cache
```
Once you have build the app with this option follow the commands listed on the [**Installation**](./installation.md#install-from-source) page of the docs to run the app.
[⬅️ Go back to Home](./README.md)

docs/features.md (new file, 42 lines)

@@ -0,0 +1,42 @@
# Features

The project provides 4 caching options as conditionally compiled features. This helps reduce the size of the compiled app by only including the code that is necessary for a particular caching option.

The different caching features provided are as follows:

- No cache
- Redis cache
- In memory cache
- Hybrid cache

## Explanation

### No Cache

This feature can drastically reduce binary size, but at the cost that subsequent search requests and previous & next page search results are not cached, which can make navigating between pages slower. Page refreshes of the same page also become slower, as each refresh has to fetch the results from the upstream search engines.

### Redis Cache

This feature allows the search engine to cache the results on the redis server. This feature can be useful for having a dedicated cache server for multiple devices hosted with the `Websurfx` server, which can all use the one dedicated cache server for hosting their cache on it. A disadvantage of this solution is that if the `Redis` server is located far away (for example provided by a VPS as a service) and it is unavailable or down for some reason, then the `Websurfx` server will not be able to function properly or will crash on startup.

### In Memory Cache

This feature is the default feature provided by the project. It allows the search engine to cache the results in memory, which can help increase the speed of the fetched cache results, and it also has the advantage of being extremely reliable, as all the results are stored in memory within the search engine. The disadvantages of this solution are that caching of results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system, and as such it is not ideal for very low memory devices and is highly unscalable.

### Hybrid Cache

This feature provides the advantages of both `In Memory` caching and `Redis` caching, and it is an ideal solution if you need a very resilient and reliable setup for `Websurfx` that can provide both speed and reliability. For example, if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting a `Websurfx` instance which will be used by hundreds or thousands of users around the world.
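To make the fallback behaviour concrete, here is a minimal, hypothetical Rust sketch of a hybrid cache that prefers Redis while it is reachable and falls back to an in-memory store otherwise. The `Cache` trait and all type names here are invented for illustration only and are not Websurfx's actual code.

```rust
use std::collections::HashMap;

// Hypothetical cache interface, invented for this illustration.
trait Cache {
    fn get(&mut self, key: &str) -> Option<String>;
    fn set(&mut self, key: &str, value: &str);
}

/// Simple in-memory backend backed by a HashMap.
struct InMemoryCache {
    map: HashMap<String, String>,
}

impl Cache for InMemoryCache {
    fn get(&mut self, key: &str) -> Option<String> {
        self.map.get(key).cloned()
    }
    fn set(&mut self, key: &str, value: &str) {
        self.map.insert(key.to_owned(), value.to_owned());
    }
}

/// Hybrid backend: use Redis while it is available, otherwise fall back
/// to the in-memory store so the instance keeps serving results.
struct HybridCache<R: Cache> {
    redis: Option<R>, // becomes `None` once the Redis server is unreachable
    memory: InMemoryCache,
}

impl<R: Cache> HybridCache<R> {
    fn get(&mut self, key: &str) -> Option<String> {
        match self.redis.as_mut() {
            Some(redis) => redis.get(key),
            None => self.memory.get(key),
        }
    }
}

fn main() {
    // Pretend the Redis server is unreachable: only the in-memory store is used.
    let memory = InMemoryCache { map: HashMap::new() };
    let mut cache: HybridCache<InMemoryCache> = HybridCache { redis: None, memory };
    cache.memory.set("rust search", "cached results");
    assert_eq!(cache.get("rust search").as_deref(), Some("cached results"));
}
```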
## Tabular Summary
| **Attributes** | **Hybrid** | **In-Memory** | **No Cache** | **Redis** |
|-----------------------------------------|------------|------------------------------------------------------|-----------------|------------------------|
| **Speed** | Fast | Caching is slow, but retrieval of cache data is fast | Slow | Fastest |
| **Reliability** | ✅ | ✅ | ✅ | ❌ |
| **Scalability** | ✅ | ❌ | - | ✅ |
| **Resiliency** | ✅ | ✅ | ✅ | ❌ |
| **Production/Large Scale/Instance use** | ✅ | Not Recommended | Not Recommended | Not Recommended |
| **Low Memory Support** | ❌ | ❌ | ✅ | ❌ |
| **Binary Size** | Big | Bigger than `No Cache` | Small | Bigger than `No Cache` |
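As a side note on the "conditionally compiled features" mentioned at the top of this page, the following is a minimal, hypothetical Rust sketch of compile-time backend selection: only the variant matching the enabled Cargo features is compiled in, which is why disabling features shrinks the binary. Apart from `redis-cache` (which appears in the build commands in these docs), the feature and function names below are assumptions for illustration, not Websurfx's actual code.

```rust
// Hypothetical example of compile-time backend selection via Cargo features.
// Only the function matching the enabled features ends up in the binary.
#[cfg(feature = "redis-cache")]
fn cache_backend() -> &'static str {
    "redis"
}

#[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
fn cache_backend() -> &'static str {
    "in-memory"
}

#[cfg(not(any(feature = "redis-cache", feature = "memory-cache")))]
fn cache_backend() -> &'static str {
    "no-cache"
}

fn main() {
    println!("cache backend compiled in: {}", cache_backend());
}
```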
[⬅️ Go back to Home](./README.md)

docs/installation.md

@@ -2,16 +2,17 @@
 ## Arch Linux
-You can install `Websurfx` through the [Aur](https://aur.archlinux.org/packages/websurfx-git), Currently we only support `Rolling/Edge` version. You can install the rolling/edge version by running the following command (using [paru](https://github.com/Morganamilo/paru)):
-```bash
+### Rolling/Edge/Unstable
+You can install `Websurfx` through the [Aur](https://aur.archlinux.org/packages/websurfx-git) by running the following command (using [paru](https://github.com/Morganamilo/paru)):
+```shell
 paru -S websurfx-edge-git
 ```
 After installing it you can run the websurfx server by running the following commands:
-```bash
-redis-server --port 8082 &
+```shell
 websurfx
 ```
@@ -19,40 +20,78 @@ Once you have started the server, open your preferred web browser and navigate t
 If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
+### Stable
+For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version), the only difference being that the package to be installed for the stable version is called `websurfx-git` instead of `websurfx-edge-git`.
 ## NixOS
 A `flake.nix` has been provided to allow installing `websurfx` easily. It utilizes [naersk](https://github.com/nix-community/naersk) to automatically generate a derivation based on `Cargo.toml` and `Cargo.lock`.
-The flake has several outputs, which may be consumed:
-```bash
+The Websurfx project provides 2 versions/flavours of the flake: `stable` and `rolling/unstable/edge`. The steps for each are covered below in different sections.
+### Rolling/Edge/Unstable
+To get started, first clone the repository, edit the config file which is located in the `websurfx` directory and then build and run the websurfx server by running the following commands:
+```shell
+git clone https://github.com/neon-mmd/websurfx.git
+cd websurfx
+cp -rf ./websurfx/ ~/.config/
+$ mkdir /opt/websurfx/
+$ cp -rf ./public/ /opt/websurfx/
 nix build .#websurfx
 nix run .#websurfx
 ```
-You may include it in your own flake by adding this repo to its inputs and adding it to `environment.systemPackages` as follows:
-```nix
-{
-  description = "My awesome configuration";
-  inputs = {
-    websurfx.url = "github:neon-mmd/websurfx";
-  };
-  outputs = { nixpkgs, ... }@inputs: {
-    nixosConfigurations = {
-      hostname = nixpkgs.lib.nixosSystem {
-        system = "x86_64-linux";
-        modules = [{
-          environment.systemPackages = [inputs.websurfx.packages.x86_64-linux.websurfx];
-        }];
-      };
-    };
-  };
-}
-```
+> **Note**
+> In the above commands the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec` or any other privilege access method.
+Once you have run the above set of commands, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
+If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
+> Optionally, you may include it in your own flake by adding this repo to its inputs and adding it to `environment.systemPackages` as follows:
+>
+> ```nix
+> {
+>   description = "My awesome configuration";
+>
+>   inputs = {
+>     websurfx.url = "github:neon-mmd/websurfx";
+>   };
+>
+>   outputs = { nixpkgs, ... }@inputs: {
+>     nixosConfigurations = {
+>       hostname = nixpkgs.lib.nixosSystem {
+>         system = "x86_64-linux";
+>         modules = [{
+>           environment.systemPackages = [inputs.websurfx.packages.x86_64-linux.websurfx];
+>         }];
+>       };
+>     };
+>   };
+> }
+> ```
+### Stable
+For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version) with the addition of one command, which has to be performed after cloning and changing directory into the repository, making the build steps as follows:
+```shell
+git clone https://github.com/neon-mmd/websurfx.git
+cd websurfx
+git checkout stable
+cp -rf ./websurfx/ ~/.config/
+$ mkdir /opt/websurfx/
+$ cp -rf ./public/ /opt/websurfx/
+nix build .#websurfx
+nix run .#websurfx
+```
+> **Note**
+> In the above commands the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec` or any other privilege access method.
 ## Other Distros
 The package is currently not available on other Linux distros. With contribution and support it can be made available on other distros as well 🙂.
@@ -87,21 +126,61 @@ git clone https://github.com/neon-mmd/websurfx.git
 cd websurfx
 ```
-Once you have changed the directory to the `websurfx` directory then follow the build options listed in the [building docs](./building.md).
-After that run the following command if you have build the app with the `redis-cache` feature:
-``` shell
-redis-server --port 8082 &
-```
-After that run the following command to start the search engine:
-``` shell
+Once you have changed the directory to the `websurfx` directory then follow the build options listed below:
+### Hybrid Cache
+> For more information on the features and their pros and cons, see: [**Features**](./features.md)
+To build the search engine with the `Hybrid caching` feature, run the following build command:
+```shell
+cargo build -r --features redis-cache
+```
+### Memory Cache (Default Feature)
+> For more information on the features and their pros and cons, see: [**Features**](./features.md)
+To build the search engine with the `In-Memory caching` feature, run the following build command:
+```shell
+cargo build -r
+```
+### No Cache
+> For more information on the features and their pros and cons, see: [**Features**](./features.md)
+To build the search engine with the `No caching` feature, run the following build command:
+```shell
+cargo build -r --no-default-features
+```
+### Redis Cache
+> For more information on the features and their pros and cons, see: [**Features**](./features.md)
+To build the search engine with the `Redis caching` feature, run the following build command:
+```shell
+cargo build -r --no-default-features --features redis-cache
+```
+> Optionally, if you have built the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above), then before launching the search engine run the following command:
+>
+> ```shell
+> redis-server --port 8082 &
+> ```
+Once you have finished building the search engine, run the following command to start it:
+```shell
 ./target/release/websurfx
 ```
-Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
+Once you have started the server, launch your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx.
 If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md).
@@ -109,7 +188,121 @@ If you want to change the port or the ip or any other configuration setting chec
 Before you start, you will need [Docker](https://docs.docker.com/get-docker/) installed on your system first.
-## Unstable/Edge/Rolling
+## Prebuild
The Websurfx project provides several prebuilt images based on the different features provided by the search engine. To get started using a prebuilt image, you will first need to create a `docker-compose.yml` file with the following content:
```yaml
---
version: '3.9'
services:
app:
# Comment the line below if you don't want to use the `hybrid/latest` image.
image: neonmmd/websurfx:latest
# Uncomment the line below if you want to use the `no cache` image.
# image: neonmmd/websurfx:nocache
# Uncomment the line below if you want to use the `memory` image.
# image: neonmmd/websurfx:memory
# Uncomment the line below if you want to use the `redis` image.
# image: neonmmd/websurfx:redis
ports:
- 8080:8080
# Uncomment the following lines if you are using the `hybrid/latest` or `redis` image.
# depends_on:
# - redis
# links:
# - redis
volumes:
- ./websurfx/:/etc/xdg/websurfx/
# Uncomment the following lines if you are using the `hybrid/latest` or `redis` image.
# redis:
# image: redis:latest
# ports:
# - 6379:6379
```
Then make sure to edit the `docker-compose.yml` file as required. After that, create a directory `websurfx` in the directory where you have placed the `docker-compose.yml` file, and then in the new directory create two new empty files named `allowlist.txt` and `blocklist.txt`. Finally, create a new config file `config.lua` with the default configuration, which looks something like this:
```lua
-- ### General ###
logging = true -- an option to enable or disable logs.
debug = false -- an option to enable or disable debug mode.
threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0).
-- ### Server ###
port = "8080" -- port on which the server should be launched
binding_ip = "0.0.0.0" -- ip address on which the server should be launched.
production_use = false -- whether to use production mode or not (in other words, this option should be enabled when the instance is hosted as a service for a large number of users (more than one))
-- if production_use is set to true
-- There will be a random delay before sending the request to the search engines; this is to prevent DDoSing the upstream search engines with a large number of simultaneous requests.
request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
rate_limiter = {
    number_of_requests = 20, -- The number of requests that are allowed within a provided time limit.
    time_limit = 3, -- The time limit within which the number of requests should be accepted.
}
-- ### Search ###
-- Filter results based on different levels. The levels provided are:
-- {{
-- 0 - None
-- 1 - Low
-- 2 - Moderate
-- 3 - High
-- 4 - Aggressive
-- }}
safe_search = 2
-- ### Website ###
-- The different colorschemes provided are:
-- {{
-- catppuccin-mocha
-- dark-chocolate
-- dracula
-- gruvbox-dark
-- monokai
-- nord
-- oceanic-next
-- one-dark
-- solarized-dark
-- solarized-light
-- tokyo-night
-- tomorrow-night
-- }}
colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme
theme = "simple" -- the theme name which should be used for the website
-- ### Caching ###
redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on.
-- ### Search Engines ###
upstream_search_engines = {
DuckDuckGo = true,
Searx = false,
} -- select the upstream search engines from which the results should be fetched.
```
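As an illustration of the randomized delay that the `production_use` option describes, here is a minimal, hypothetical Rust sketch. The crate choices (`rand`, `tokio`) and the delay range are assumptions for this example, not Websurfx's actual implementation.

```rust
use std::time::Duration;

use rand::Rng;

// Hypothetical helper: wait a random short interval before querying the
// upstream engines so simultaneous user requests do not arrive in one burst.
async fn delay_before_upstream_request(production_use: bool) {
    if production_use {
        let secs = rand::thread_rng().gen_range(1..=5); // assumed delay range
        tokio::time::sleep(Duration::from_secs(secs)).await;
    }
    // ...send the actual request to the upstream search engine here...
}

#[tokio::main]
async fn main() {
    delay_before_upstream_request(true).await;
}
```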
Then run the following command to deploy the search engine:
```shell
$ docker compose up -d
```
> **Note**
> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec` or any other privilege access method.
Then launch the browser of your choice and navigate to http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>.
> **Note**
> The official prebuilt images only support `stable` versions of the app and will not support `rolling/edge/unstable` versions. But with support and contribution it could be made available for these versions as well 🙂.
## Manual Deployment
This section covers how to deploy the app with Docker by manually building the image and deploying it.
> **Note**
> This section is provided for those who want to further customize the docker image or for those who are extra cautious about security.
### Unstable/Edge/Rolling
First clone the repository by running the following command:
@@ -178,15 +371,18 @@ upstream_search_engines = {
 } -- select the upstream search engines from which the results should be fetched.
 ```
-After this run the following command to deploy the app:
+After this make sure to edit the `docker-compose.yml` and `Dockerfile` files as required, then run the following command to deploy the app:
 ```bash
-docker compose up -d --build
+$ docker compose up -d --build
 ```
+> **Note**
+> In the above command the dollar sign (**$**) refers to running the command in privileged mode by using utilities like `sudo`, `doas`, `pkexec` or any other privilege access method.
 This will take around 5-10 mins for the first deployment; afterwards the docker build stages will be cached, so builds will be faster from the next time onwards. After the above step finishes, launch your preferred browser and then navigate to `http://<ip_address_of_the_device>:<whatever_port_you_provided_in_the_config>`.
-## Stable
+### Stable
 For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with the addition of one command, which has to be performed after cloning and changing directory into the repository, which makes the cloning step as follows:
For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the cloning step as follows: For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the cloning step as follows: