diff --git a/.github/workflows/rust_format.yml b/.github/workflows/rust_format.yml
index d865c8c..c300863 100644
--- a/.github/workflows/rust_format.yml
+++ b/.github/workflows/rust_format.yml
@@ -19,7 +19,16 @@ jobs:
profile: minimal
toolchain: stable
components: rustfmt, clippy
-
+ - name: Format
+ uses: actions-rs/cargo@v1
+ with:
+ command: fmt
+ args: -- --check
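+ # `--check` makes rustfmt verify formatting and fail the step instead of rewriting files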
+ - name: Clippy
+ uses: actions-rs/cargo@v1
+ with:
+ command: clippy
+ args: --all-features --all-targets --all
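+ # `--all` is the older alias of `--workspace`: lint every workspace crate, with all features and targets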
- name: Run cargo check
uses: actions-rs/cargo@v1
with:
diff --git a/public/images/websurfx_logo.png b/public/images/websurfx_logo.png
new file mode 100644
index 0000000..9449e33
Binary files /dev/null and b/public/images/websurfx_logo.png differ
diff --git a/public/templates/index.html b/public/templates/index.html
index 64021e9..5bb4d34 100644
--- a/public/templates/index.html
+++ b/public/templates/index.html
@@ -1,6 +1,6 @@
{{>header this}}
-
+
{{>search_bar}}
diff --git a/src/cache/mod.rs b/src/cache/mod.rs
index 91a91ca..de7dd4e 100644
--- a/src/cache/mod.rs
+++ b/src/cache/mod.rs
@@ -1 +1 @@
-pub mod cacher;
+pub mod cacher;
diff --git a/src/config_parser/parser.rs b/src/config_parser/parser.rs
index bbeba86..55d4bec 100644
--- a/src/config_parser/parser.rs
+++ b/src/config_parser/parser.rs
@@ -24,6 +24,14 @@ pub struct Config {
pub binding_ip_addr: String,
pub style: Style,
pub redis_connection_url: String,
+ pub aggregator: AggregatorConfig,
+}
+
+/// Configuration options for the aggregator.
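+///
+/// # Example (illustrative)
+///
+/// ```ignore
+/// let aggregator_config = AggregatorConfig { random_delay: true };
+/// ```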
+#[derive(Clone)]
+pub struct AggregatorConfig {
+ /// Whether to introduce a random delay before sending the request to the upstream search engines.
+ pub random_delay: bool,
}
impl Config {
@@ -45,6 +53,15 @@ impl Config {
)?)
.exec()?;
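+ // `production_use` is read from websurfx/config.lua; when enabled, the aggregator
+ // sleeps for a random interval before querying the upstream search engines.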
+ let production_use = globals.get::<_, bool>("production_use")?;
+ let aggregator_config = if production_use {
+ AggregatorConfig { random_delay: true }
+ } else {
+ AggregatorConfig {
+ random_delay: false,
+ }
+ };
+
Ok(Config {
port: globals.get::<_, u16>("port")?,
binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -53,6 +70,7 @@ impl Config {
globals.get::<_, String>("colorscheme")?,
),
redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+ aggregator: aggregator_config,
})
})
}
diff --git a/src/search_results_handler/aggregation_models.rs b/src/search_results_handler/aggregation_models.rs
index 4fe670e..b6e6b81 100644
--- a/src/search_results_handler/aggregation_models.rs
+++ b/src/search_results_handler/aggregation_models.rs
@@ -116,7 +116,7 @@ impl RawSearchResult {
}
}
-/// A named struct to store, serialize, deserialize the all the search results scraped and
+/// A named struct to store, serialize, and deserialize all the search results scraped and
/// aggregated from the upstream search engines.
///
/// # Fields
diff --git a/src/search_results_handler/aggregator.rs b/src/search_results_handler/aggregator.rs
index 5133094..8b86972 100644
--- a/src/search_results_handler/aggregator.rs
+++ b/src/search_results_handler/aggregator.rs
@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
///
/// * `query` - Accepts a string to query with the above upstream search engines.
/// * `page` - Accepts an u32 page number.
+/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
///
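+/// # Example (illustrative, inside an async context)
+///
+/// ```ignore
+/// let results = aggregate("pixel art", 1, false).await?;
+/// ```
+///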
/// # Error
///
@@ -38,14 +39,17 @@ use crate::engines::{duckduckgo, searx};
pub async fn aggregate(
query: &str,
page: u32,
+ random_delay: bool,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let user_agent: String = random_user_agent();
let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
// Add a random delay before making the request.
- let mut rng = rand::thread_rng();
- let delay_secs = rng.gen_range(1..10);
- std::thread::sleep(Duration::from_secs(delay_secs));
+ if random_delay {
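+ // sleep for a random 1-9 seconds so that simultaneous requests are spread out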
+ let mut rng = rand::thread_rng();
+ let delay_secs = rng.gen_range(1..10);
+ std::thread::sleep(Duration::from_secs(delay_secs));
+ }
// fetch results from upstream search engines simultaneously/concurrently.
let (ddg_map_results, searx_map_results) = join!(
diff --git a/src/server/routes.rs b/src/server/routes.rs
index ed2299f..0f84cc9 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -81,10 +81,10 @@ pub async fn search(
.insert_header(("location", "/"))
.finish())
} else {
- let page_url: String; // Declare the page_url variable without initializing it
+ let page_url: String; // Declare the page_url variable without initializing it
// ...
-
+
let page = match params.page {
Some(page_number) => {
if page_number <= 1 {
@@ -98,7 +98,7 @@ pub async fn search(
"http://{}:{}/search?q={}&page={}",
config.binding_ip_addr, config.port, query, page_number
);
-
+
page_number
}
}
@@ -110,11 +110,11 @@ pub async fn search(
req.uri(),
1
);
-
+
1
}
};
-
+
// fetch the cached results json.
let cached_results_json = redis_cache.cached_results_json(&page_url);
// check if fetched results was indeed fetched or it was an error and if so
@@ -127,7 +127,7 @@ pub async fn search(
}
Err(_) => {
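+ // results were not cached: aggregate fresh results, forwarding the
+ // random-delay flag from the config file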
let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
- aggregate(query, page).await?;
+ aggregate(query, page, config.aggregator.random_delay).await?;
results_json.add_style(config.style.clone());
redis_cache
.cache_results(serde_json::to_string(&results_json)?, &page_url)?;
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 916a9b3..c30f376 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -19,3 +19,7 @@ theme = "simple" -- the theme name which should be used for the website
-- Caching
redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+
+production_use = false -- whether to use production mode or not (in other words, set this to true when websurfx is hosted on a server as a service for a large number of users)
+-- if production_use is set to true
+ -- There will be a random delay before sending the requests to the upstream search engines; this is to prevent the upstream search engines from being DDoSed by a large number of simultaneous requests.
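+-- for example, a public instance would set: production_use = true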
\ No newline at end of file