Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-21 21:48:21 -05:00)

Compare commits: 9 commits, 169371bf29 ... 135d59c847
|SHA1|
|-|
|135d59c847|
|7ca74751f7|
|bc7d51ad7a|
|1c1f299980|
|2299e6c28e|
|669e365913|
|b2cbc5eaa5|
|851ea314a7|
|fbf73634ee|
Cargo.lock (generated, 2 lines changed):

@@ -4146,7 +4146,7 @@ checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
 [[package]]
 name = "websurfx"
-version = "1.9.6"
+version = "1.9.10"
 dependencies = [
  "actix-cors",
  "actix-files",
Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.9.6"
+version = "1.9.10"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
README.md:

@@ -32,7 +32,7 @@
   <a href=""
     ><img
       alt="Maintenance"
-      src="https://img.shields.io/maintenance/yes/2023?style=flat-square"
+      src="https://img.shields.io/maintenance/yes/2024?style=flat-square"
   />
   </a>
   <a href="https://www.codefactor.io/repository/github/neon-mmd/websurfx">
Websurfx instances page (adds a "Maintained By" column to the table):

@@ -4,12 +4,12 @@

 This page provides a list of `Websurfx` instances provided by us and our community.

-|URL|Network|Version|Location|Behind Cloudflare?|Status|TLS|IPv6|Comment|
-|-|-|-|-|-|-|-|-|-|
-|https://websurfx.co|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/2/status"></a>|✅|❌||
-|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/5/status"></a>|✅|❌||
-|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/7/status"></a>|✅|✅||
-|https://alamin655-websurfx.hf.space|www|stable|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/6/status"></a>|✅|❌||
+|URL|Network|Version|Location|Behind Cloudflare?|Status|Maintained By|TLS|IPv6|Comment|
+|-|-|-|-|-|-|-|-|-|-|-|
+|https://websurfx.co|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/2/status"></a>||✅|❌||
+|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/5/status"></a>||✅|❌||
+|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/7/status"></a>||✅|✅||
+|https://alamin655-websurfx.hf.space|www|stable|🇺🇸 US||<a href="https://status.websurfx.pp.ua"><img src="https://status.websurfx.pp.ua/api/badge/6/status"></a>||✅|❌||


 [⬅️ Go back to Home](./README.md)
src/cache/cacher.rs (vendored, 1 line changed):
@@ -4,6 +4,7 @@
 use error_stack::Report;
 #[cfg(feature = "memory-cache")]
 use mini_moka::sync::Cache as MokaCache;
+#[cfg(feature = "memory-cache")]
 use mini_moka::sync::ConcurrentCacheExt;

 #[cfg(feature = "memory-cache")]
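The one-line addition above matters because a `#[cfg(...)]` attribute gates only the single item that follows it: the attribute on the `MokaCache` import does not cover the `ConcurrentCacheExt` import on the next line, which would otherwise fail to resolve whenever the `memory-cache` feature is disabled. A minimal, self-contained sketch of the pattern (the feature name mirrors the diff; the surrounding program is illustrative, not websurfx code):

```rust
// Illustrative only: per-item `#[cfg]` gating of imports and functions.
// Compiles both with and without `--features memory-cache` (assuming the
// feature is declared in Cargo.toml).

#[cfg(feature = "memory-cache")]
use std::collections::HashMap;

// Each gated item needs its own attribute; the one above does not cover this.
#[cfg(feature = "memory-cache")]
fn new_cache() -> HashMap<String, Vec<u8>> {
    HashMap::new()
}

fn main() {
    #[cfg(feature = "memory-cache")]
    {
        let cache = new_cache();
        println!("memory-cache enabled, {} entries", cache.len());
    }
    #[cfg(not(feature = "memory-cache"))]
    println!("memory-cache disabled");
}
```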
Configuration parser:

@@ -98,6 +98,7 @@ impl Config {

         #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
         let parsed_cet = globals.get::<_, u16>("cache_expiry_time")?;
+        #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
         let cache_expiry_time = match parsed_cet {
             0..=59 => {
                 log::error!(
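This hunk gates the validation of `cache_expiry_time`, whose `0..=59` arm treats sub-minute values as errors. A rough sketch of that validation shape, with two loudly labeled assumptions: the fallback to a 60-second minimum, and a plain `eprintln!` standing in for `log::error!` so the example needs no dependencies:

```rust
// Sketch of the cache-expiry validation pattern; the fallback value is an
// assumption, not taken from the websurfx source.
const MINIMUM_CACHE_EXPIRY_TIME: u16 = 60; // assumed minimum, in seconds

fn validate_cache_expiry_time(parsed_cet: u16) -> u16 {
    match parsed_cet {
        // Sub-minute values are rejected, mirroring the `0..=59` arm above.
        0..=59 => {
            eprintln!(
                "Invalid cache_expiry_time {parsed_cet}: must be at least 60 seconds, \
                 using {MINIMUM_CACHE_EXPIRY_TIME} instead"
            );
            MINIMUM_CACHE_EXPIRY_TIME
        }
        valid => valid,
    }
}

fn main() {
    assert_eq!(validate_cache_expiry_time(30), 60);
    assert_eq!(validate_cache_expiry_time(600), 600);
}
```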
`SearchEngine` trait:

@@ -86,6 +86,42 @@ pub trait SearchEngine: Sync + Send {
         .change_context(EngineError::RequestError)?)
     }

+    /// This helper function fetches/requests the json search results from the upstream search engine as a vector of bytes.
+    ///
+    /// # Arguments
+    ///
+    /// * `url` - It takes the url of the upstream search engine with the user requested search
+    ///   query appended in the search parameters.
+    /// * `header_map` - It takes the http request headers to be sent to the upstream engine in
+    ///   order to prevent being detected as a bot. It takes the header as a HeaderMap type.
+    /// * `client` - It takes the `reqwest` `Client` used to send the request; the timeout
+    ///   configured on the client limits how long each request can remain connected while
+    ///   waiting for the upstream engine to provide the results.
+    ///
+    /// # Error
+    ///
+    /// It returns the json data as a vector of bytes if the upstream engine provides the data as expected,
+    /// otherwise it returns a custom `EngineError`.
+    async fn fetch_json_as_bytes_from_upstream(
+        &self,
+        url: &str,
+        header_map: reqwest::header::HeaderMap,
+        client: &Client,
+    ) -> Result<Vec<u8>, EngineError> {
+        // fetch the json response from the upstream search engine
+
+        Ok(client
+            .get(url)
+            .headers(header_map) // add spoofed headers to emulate human behavior
+            .send()
+            .await
+            .change_context(EngineError::RequestError)?
+            .bytes()
+            .await
+            .change_context(EngineError::RequestError)?
+            .to_vec())
+    }
+
     /// This function scrapes results from the upstream engine and puts all the scraped results like
     /// title, visiting_url (href in html), engine (from which engine it was fetched from) and description
     /// in a RawSearchResult and then adds that to HashMap whose keys are url and values are RawSearchResult
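Because the new helper returns raw bytes, each caller still parses them itself. Below is a hedged usage sketch under stated assumptions: `Suggestion` and its `phrase` field are hypothetical response shapes, the standalone function stands in for a trait method, and a boxed error replaces the `error_stack`/`EngineError` plumbing (requires the `reqwest`, `serde`, and `serde_json` crates):

```rust
use serde::Deserialize;

// Hypothetical upstream response item; real engines define their own shapes.
#[derive(Deserialize, Debug)]
struct Suggestion {
    phrase: String, // assumed field name, not from the websurfx API
}

async fn fetch_suggestions(
    client: &reqwest::Client,
    url: &str,
) -> Result<Vec<Suggestion>, Box<dyn std::error::Error>> {
    // Same steps as fetch_json_as_bytes_from_upstream: GET the url and
    // collect the response body into a byte vector.
    let bytes = client.get(url).send().await?.bytes().await?.to_vec();
    // Deserialize the byte buffer into typed results; a malformed payload
    // surfaces as an error instead of silently yielding nothing.
    let suggestions: Vec<Suggestion> = serde_json::from_slice(&bytes)?;
    Ok(suggestions)
}
```

Returning bytes rather than a parsed value keeps the trait helper engine-agnostic: JSON-based engines can deserialize the buffer however their upstream requires, just as HTML-based engines feed the existing HTML fetch helper into their scrapers.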