0
0
mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-21 21:48:21 -05:00

Merge pull request #469 from neon-mmd/FIX/468_pagination-for-the-upstream-search-engines-not-working

🚑️ Pagination code for the upstream search engines
This commit is contained in:
alamin655 2023-12-30 21:37:06 +05:30 committed by GitHub
commit ddb10f6584
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 32 additions and 51 deletions

30
Cargo.lock generated
View File

@ -79,7 +79,7 @@ dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
"ahash 0.8.6",
"ahash 0.8.7",
"base64 0.21.5",
"bitflags 2.4.1",
"bytes 1.5.0",
@ -190,7 +190,7 @@ dependencies = [
"actix-service",
"actix-utils",
"actix-web-codegen",
"ahash 0.8.6",
"ahash 0.8.7",
"bytes 1.5.0",
"bytestring",
"cfg-if 1.0.0",
@ -255,9 +255,9 @@ dependencies = [
[[package]]
name = "ahash"
version = "0.8.6"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
dependencies = [
"cfg-if 1.0.0",
"getrandom",
@ -489,9 +489,9 @@ dependencies = [
[[package]]
name = "bstr"
version = "1.8.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c"
checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
dependencies = [
"memchr",
"serde",
@ -637,18 +637,18 @@ dependencies = [
[[package]]
name = "clap"
version = "4.4.11"
version = "4.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2"
checksum = "dcfab8ba68f3668e89f6ff60f5b205cea56aa7b769451a59f34b8682f51c056d"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.4.11"
version = "4.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb"
checksum = "fb7fb5e4e979aec3be7791562fcba452f94ad85e954da024396433e0e25a79e9"
dependencies = [
"anstyle",
"clap_lex",
@ -1459,7 +1459,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.7",
"bumpalo",
]
@ -1973,9 +1973,9 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
[[package]]
name = "memchr"
version = "2.6.4"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
[[package]]
name = "memoffset"
@ -3066,7 +3066,7 @@ version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "585480e3719b311b78a573db1c9d9c4c1f8010c2dee4cc59c2efe58ea4dbc3e1"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.7",
"cssparser 0.31.2",
"ego-tree",
"html5ever 0.26.0",
@ -4056,7 +4056,7 @@ checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
[[package]]
name = "websurfx"
version = "1.6.11"
version = "1.7.3"
dependencies = [
"actix-cors",
"actix-files",

View File

@ -1,6 +1,6 @@
[package]
name = "websurfx"
version = "1.6.11"
version = "1.7.3"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"

View File

@ -51,15 +51,14 @@ impl SearchEngine for DuckDuckGo {
// Page number can be missing or empty string and so appropriate handling is required
// so that upstream server receives valid page number.
let url: String = match page {
1 | 0 => {
0 => {
format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js")
}
_ => {
format!(
"https://duckduckgo.com/html/?q={}&s={}&dc={}&v=1&o=json&api=/d.js",
query,
(page / 2 + (page % 2)) * 30,
(page / 2 + (page % 2)) * 30 + 1
"https://duckduckgo.com/html/?q={query}&s={}&dc={}&v=1&o=json&api=/d.js",
page * 30,
page * 30 + 1
)
}
};

View File

@ -65,17 +65,10 @@ impl SearchEngine for LibreX {
) -> Result<HashMap<String, SearchResult>, EngineError> {
// Page number can be missing or empty string and so appropriate handling is required
// so that upstream server receives valid page number.
let url: String = match page {
1 | 0 => {
format!("https://search.ahwx.org/search.php?q={query}&p=0&t=10")
}
_ => {
format!(
"https://search.ahwx.org/search.php?q={query}&p={}&t=10",
page * 10,
)
}
};
let url: String = format!(
"https://search.ahwx.org/search.php?q={query}&p={}&t=10",
page * 10
);
// initializing HeaderMap and adding appropriate headers.
let header_map = HeaderMap::try_from(&HashMap::from([

View File

@ -50,14 +50,10 @@ impl SearchEngine for Searx {
safe_search = 2;
};
let url: String = match page {
0 | 1 => {
format!("https://searx.be/search?q={query}&pageno=1&safesearch={safe_search}")
}
_ => {
format!("https://searx.be/search?q={query}&pageno={page}&safesearch={safe_search}")
}
};
let url: String = format!(
"https://searx.be/search?q={query}&pageno={}&safesearch={safe_search}",
page + 1
);
// initializing headers and adding appropriate headers.
let header_map = HeaderMap::try_from(&HashMap::from([

View File

@ -50,17 +50,10 @@ impl SearchEngine for Startpage {
) -> Result<HashMap<String, SearchResult>, EngineError> {
// Page number can be missing or empty string and so appropriate handling is required
// so that upstream server receives valid page number.
let url: String = match page {
1 | 0 => {
format!("https://startpage.com/do/dsearch?q={query}&num=10&start=0")
}
_ => {
format!(
"https://startpage.com/do/dsearch?q={query}&num=10&start={}",
page * 10,
)
}
};
let url: String = format!(
"https://startpage.com/do/dsearch?q={query}&num=10&start={}",
page * 10,
);
// initializing HeaderMap and adding appropriate headers.
let header_map = HeaderMap::try_from(&HashMap::from([