Mirror of https://github.com/neon-mmd/websurfx.git (synced 2024-11-22 14:08:23 -05:00)

Merge branch 'rolling' of https://github.com/cybrejon/websurfx into rolling

Author: cybrejon (2024-02-20 08:53:59 +08:00)
Commit: d5df3a6122
13 changed files with 451 additions and 247 deletions


@@ -34,7 +34,7 @@ jobs:
uses: docker/setup-buildx-action@v3
# Set buildx cache
- name: Cache register
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: buildx-cache

.github/workflows/release.yml (new file)

@@ -0,0 +1,72 @@
name: Bump release version
on:
pull_request:
branches: [rolling]
types:
- closed
permissions:
contents: write
pull-requests: write
repository-projects: write
concurrency: production
jobs:
build:
name: bump tag version and release
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.sha }}
fetch-depth: 0
- name: Bump version and push tag
id: version-bump
uses: hennejg/github-tag-action@v4.4.0
with:
github_token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
release_branches: rolling
- name: create branch
uses: peterjgrainger/action-create-branch@v2.4.0
env:
GITHUB_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
with:
branch: update-from-${{ github.sha }}
- name: update cargo.toml
run: |
appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
- run: rustup toolchain install stable --profile minimal
- run: rustup update stable && rustup default stable
- name: regenerate cargo.lock
run: cargo generate-lockfile
- name: auto commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "[skip ci] updating app version to ${{ steps.version-bump.outputs.new_tag }}"
branch: update-from-${{ github.sha }}
# create PR using GitHub CLI
- name: create PR with update info
id: create-pr
run: gh pr create --base rolling --head update-from-${{ github.sha }} --title 'Merge new update into rolling' --body 'Created by Github action'
env:
GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
# merge PR using GitHub CLI
- name: merge PR with update info
id: merge-pr
run: gh pr merge --admin --merge --subject 'Merge update info' --delete-branch
env:
GH_TOKEN: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
- name: Create Release
uses: softprops/action-gh-release@v1
with:
token: ${{ secrets.ADMIN_RIGHTS_TOKEN }}
generate_release_notes: true
name: ${{ steps.version-bump.outputs.new_tag }}
tag_name: ${{ steps.version-bump.outputs.new_tag }}
prerelease: false
env:
GITHUB_REPOSITORY: ${{ github.repository }}
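
For reference, the `grep -oE '[0-9]+\.[0-9]+\.[0-9]+'` step above is what pins the bumped tag down to a bare `MAJOR.MINOR.PATCH` string before `sed` rewrites `Cargo.toml`. A rough Rust equivalent using the `regex` crate the project already depends on (a hypothetical helper for local testing, not part of this commit):

use regex::Regex;

/// Extract the first `MAJOR.MINOR.PATCH` substring from a tag such as "v1.9.20".
fn extract_semver(tag: &str) -> Option<&str> {
    // Same pattern the workflow greps out of the bumped tag.
    let re = Regex::new(r"[0-9]+\.[0-9]+\.[0-9]+").expect("pattern is valid");
    re.find(tag).map(|m| m.as_str())
}

fn main() {
    assert_eq!(extract_semver("v1.9.20"), Some("1.9.20"));
    assert_eq!(extract_semver("no-version-here"), None);
}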


@@ -33,7 +33,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all-targets --all
args: --all-targets --all-features --all
- name: Run cargo check
uses: actions-rs/cargo@v1
with:

Cargo.lock (generated)

@@ -31,21 +31,20 @@ dependencies = [
"futures-util",
"log",
"once_cell",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
name = "actix-files"
version = "0.6.2"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d832782fac6ca7369a70c9ee9a20554623c5e51c76e190ad151780ebea1cf689"
checksum = "bf0bdd6ff79de7c9a021f5d9ea79ce23e108d8bfc9b49b5b4a2cf6fad5a35212"
dependencies = [
"actix-http",
"actix-service",
"actix-utils",
"actix-web",
"askama_escape",
"bitflags 1.3.2",
"bitflags 2.4.1",
"bytes 1.5.0",
"derive_more",
"futures-core",
@@ -55,6 +54,7 @@ dependencies = [
"mime_guess",
"percent-encoding 2.3.1",
"pin-project-lite",
"v_htmlescape",
]
[[package]]
@@ -99,7 +99,7 @@ dependencies = [
"pin-project-lite",
"rand 0.8.5",
"sha1",
"smallvec 1.11.2",
"smallvec 1.13.1",
"tokio 1.35.1",
"tokio-util",
"tracing",
@@ -111,8 +111,8 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
dependencies = [
"quote 1.0.33",
"syn 2.0.43",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -210,7 +210,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded 0.7.1",
"smallvec 1.11.2",
"smallvec 1.13.1",
"socket2",
"time 0.3.31",
"url 2.5.0",
@@ -223,9 +223,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb1f50ebbb30eca122b188319a4398b3f7bb4a8cdf50ecfb73bfc6a3c3ce54f5"
dependencies = [
"actix-router",
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -346,12 +346,6 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "askama_escape"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "async-compression"
version = "0.4.5"
@@ -378,9 +372,9 @@ version = "0.1.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "531b97fb4cd3dfdce92c35dedbfdc1f0b9d8091c8ca943d6dae340ef5012d514"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -744,8 +738,8 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e1e0fdd2e5d3041e530e1b21158aeeef8b5d0e306bc5c1e3d6cf0930d10e25a"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
]
@@ -956,7 +950,7 @@ dependencies = [
"dtoa-short",
"itoa 1.0.10",
"phf 0.11.2",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -969,7 +963,7 @@ dependencies = [
"dtoa-short",
"itoa 1.0.10",
"phf 0.11.2",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -987,8 +981,8 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
dependencies = [
"quote 1.0.33",
"syn 2.0.43",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -1026,8 +1020,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"rustc_version 0.4.0",
"syn 1.0.109",
]
@@ -1101,14 +1095,24 @@ dependencies = [
]
[[package]]
name = "env_logger"
version = "0.10.1"
name = "env_filter"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece"
checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea"
dependencies = [
"log",
]
[[package]]
name = "env_logger"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05e7cf40684ae96ade6232ed84582f40ce0a66efcd43a5117aef610534f8e0b8"
dependencies = [
"env_filter",
"log",
]
[[package]]
name = "envmnt"
version = "0.8.4"
@@ -1170,8 +1174,8 @@ version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
"synstructure",
]
@@ -1341,9 +1345,9 @@ version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -1448,7 +1452,7 @@ dependencies = [
"parking_lot 0.12.1",
"quanta",
"rand 0.8.5",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -1554,8 +1558,8 @@ dependencies = [
"log",
"mac",
"markup5ever 0.11.0",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
]
@@ -1860,9 +1864,9 @@ dependencies = [
[[package]]
name = "lightningcss"
version = "1.0.0-alpha.51"
version = "1.0.0-alpha.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d6ad516c08b24c246b339159dc2ee2144c012e8ebdf4db4bddefb8734b2b69"
checksum = "771a62dedf5ec563bbfea9760f6c6a6bc546e67355eba0cd7d00c0dc34b11d90"
dependencies = [
"ahash 0.7.7",
"bitflags 2.4.1",
@@ -1875,7 +1879,7 @@ dependencies = [
"parcel_selectors",
"paste",
"pathdiff",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -2016,8 +2020,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0be95d66c3024ffce639216058e5bae17a83ecaf266ffc6e4d060ad447c9eed2"
dependencies = [
"proc-macro-error",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
]
@@ -2077,7 +2081,7 @@ dependencies = [
"crossbeam-utils 0.8.18",
"dashmap",
"skeptic",
"smallvec 1.11.2",
"smallvec 1.13.1",
"tagptr",
"triomphe",
]
@@ -2301,9 +2305,9 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -2337,7 +2341,7 @@ dependencies = [
"phf 0.10.1",
"phf_codegen 0.10.0",
"precomputed-hash",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -2385,7 +2389,7 @@ dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.4.1",
"smallvec 1.11.2",
"smallvec 1.13.1",
"windows-targets 0.48.5",
]
@@ -2512,9 +2516,9 @@ checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b"
dependencies = [
"phf_generator 0.11.2",
"phf_shared 0.11.2",
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -2559,9 +2563,9 @@ version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -2618,8 +2622,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
"version_check",
]
@@ -2630,8 +2634,8 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"version_check",
]
@@ -2646,9 +2650,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.71"
version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
dependencies = [
"unicode-ident",
]
@@ -2701,11 +2705,11 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.33"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2 1.0.71",
"proc-macro2 1.0.78",
]
[[package]]
@@ -3208,7 +3212,7 @@ dependencies = [
"phf_codegen 0.10.0",
"precomputed-hash",
"servo_arc",
"smallvec 1.11.2",
"smallvec 1.13.1",
]
[[package]]
@@ -3237,22 +3241,22 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
version = "1.0.193"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.193"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -3366,9 +3370,9 @@ dependencies = [
[[package]]
name = "smallvec"
version = "1.11.2"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
dependencies = [
"serde",
]
@@ -3441,8 +3445,8 @@ checksum = "f0f45ed1b65bf9a4bf2f7b7dc59212d1926e9eaf00fa998988e420fd124467c6"
dependencies = [
"phf_generator 0.7.24",
"phf_shared 0.7.24",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"string_cache_shared",
]
@@ -3454,8 +3458,8 @@ checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988"
dependencies = [
"phf_generator 0.10.0",
"phf_shared 0.10.0",
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
]
[[package]]
@@ -3487,19 +3491,19 @@ version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.43"
version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53"
checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"unicode-ident",
]
@@ -3509,8 +3513,8 @@ version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 1.0.109",
"unicode-xid 0.2.4",
]
@@ -3733,9 +3737,9 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
@@ -4008,6 +4012,12 @@ dependencies = [
"rand 0.6.5",
]
[[package]]
name = "v_htmlescape"
version = "0.15.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e8257fbc510f0a46eb602c10215901938b5c2a7d5e70fc11483b1d3c9b5b18c"
[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -4081,9 +4091,9 @@ dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
"wasm-bindgen-shared",
]
@@ -4105,7 +4115,7 @@ version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
dependencies = [
"quote 1.0.33",
"quote 1.0.35",
"wasm-bindgen-macro-support",
]
@@ -4115,9 +4125,9 @@ version = "0.2.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -4146,7 +4156,7 @@ checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
[[package]]
name = "websurfx"
version = "1.9.0"
version = "1.9.20"
dependencies = [
"actix-cors",
"actix-files",
@@ -4180,7 +4190,7 @@ dependencies = [
"scraper",
"serde",
"serde_json",
"smallvec 1.11.2",
"smallvec 1.13.1",
"tempfile",
"tokio 1.35.1",
]
@@ -4417,9 +4427,9 @@ version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2 1.0.71",
"quote 1.0.33",
"syn 2.0.43",
"proc-macro2 1.0.78",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]

Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "websurfx"
version = "1.9.0"
version = "1.9.20"
edition = "2021"
description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
repository = "https://github.com/neon-mmd/websurfx"
@@ -15,15 +15,15 @@ path = "src/bin/websurfx.rs"
[dependencies]
reqwest = {version="0.11.22", default-features=false, features=["rustls-tls","brotli", "gzip"]}
tokio = {version="1.32.0",features=["rt-multi-thread","macros"], default-features = false}
serde = {version="1.0.190", default-features=false, features=["derive"]}
serde = {version="1.0.196", default-features=false, features=["derive"]}
serde_json = {version="1.0.109", default-features=false}
maud = {version="0.25.0", default-features=false, features=["actix-web"]}
scraper = {version="0.18.1", default-features = false}
actix-web = {version="4.4.0", features = ["cookies", "macros", "compress-brotli"], default-features=false}
actix-files = {version="0.6.2", default-features=false}
actix-files = {version="0.6.5", default-features=false}
actix-cors = {version="0.6.4", default-features=false}
fake-useragent = {version="0.1.3", default-features=false}
env_logger = {version="0.10.0", default-features=false}
env_logger = {version="0.11.1", default-features=false}
log = {version="0.4.20", default-features=false}
mlua = {version="0.9.1", features=["luajit", "vendored"], default-features=false}
redis = {version="0.24.0", features=["tokio-comp","connection-manager"], default-features = false, optional = true}
@@ -31,7 +31,7 @@ blake3 = {version="1.5.0", default-features=false}
error-stack = {version="0.4.0", default-features=false, features=["std"]}
async-trait = {version="0.1.76", default-features=false}
regex = {version="1.9.4", features=["perf"], default-features = false}
smallvec = {version="1.11.0", features=["union", "serde"], default-features=false}
smallvec = {version="1.13.1", features=["union", "serde"], default-features=false}
futures = {version="0.3.28", default-features=false}
dhat = {version="0.3.2", optional = true, default-features=false}
mimalloc = { version = "0.1.38", default-features = false }
@@ -50,7 +50,7 @@ criterion = {version="0.5.1", default-features=false}
tempfile = {version="3.8.0", default-features=false}
[build-dependencies]
lightningcss = {version="1.0.0-alpha.50", default-features=false, features=["grid"]}
lightningcss = {version="1.0.0-alpha.52", default-features=false, features=["grid"]}
minify-js = {version="0.6.0", default-features=false}
[profile.dev]

README.md

@@ -32,7 +32,7 @@
<a href=""
><img
alt="Maintenance"
src="https://img.shields.io/maintenance/yes/2023?style=flat-square"
src="https://img.shields.io/maintenance/yes/2024?style=flat-square"
/>
</a>
<a href="https://www.codefactor.io/repository/github/neon-mmd/websurfx">
@@ -141,7 +141,7 @@ redis-server --port 8082 &
Once you have started the server, open your preferred web browser and navigate to <http://127.0.0.1:8080> to start using Websurfx.
> **Note**
> [!Note]
>
> 1. The project is no longer in the testing phase and is now ready for production use.
> 2. There are many features still missing, like `support for image search`, `different categories`, `quick apps`, etc., but they will be added soon as part of future releases.
@@ -166,7 +166,7 @@ Websurfx comes loaded with several themes and color schemes, which you can apply
# Multi-Language Support 🌍
> **Note**
> [!Note]
> Currently, we do not support other languages, but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry.
**[⬆️ Back to Top](#--)**
@@ -218,7 +218,7 @@ Several areas that we need a bit of help with at the moment are:
# Documentation 📘
> **Note**
> [!Note]
> We welcome any contributions to the [documentation](docs) as this will benefit everyone who uses this project.
**[⬆️ Back to Top](#--)**
@@ -267,7 +267,7 @@ We would like to thank the following people for their contributions and support:
<p>
<a href="https://github.com/neon-mmd/websurfx/stargazers">
<img src="https://reporoster.com/stars/dark/neon-mmd/websurfx" />
<img src="http://reporoster.com/stars/dark/neon-mmd/websurfx"/>
</a>
</p>

docs/instances.md

@@ -4,10 +4,12 @@
This page provides a list of `Websurfx` instances provided by us and our community.
|URL|Network|Version|Location|Behind Cloudflare?|Maintained By|TLS|IPv6|Comment|
|URL|Network|Version|Location|Status|Maintained By|TLS|IPv6|Comment|
|-|-|-|-|-|-|-|-|-|
|https://websurfx.co/|www|edge|🇺🇸 US|||✅|❌||
|https://websurfx.onrender.com/|www|edge|🇺🇸 US|||✅|❌||
|https://alamin655-websurfx.hf.space/|www|v0.21.4|🇺🇸 US||[websurfx project](https://github.com/neon-mmd/websurfx)|✅|❌||
|https://websurfx.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|https://alamin655-spacex.hf.space|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-spacex.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
|https://websurfx.instance.pp.ua|www|rolling|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Fwebsurfx.instance.pp.ua&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|✅||
|https://alamin655-surfx.hf.space|www|stable|🇺🇸 US|<a href="https://status.websurfx.pp.ua"><img src="https://img.shields.io/website?url=https%3A%2F%2Falamin655-surfx.hf.space&label=Status"></a>|[Websurfx Project](https://github.com/neon-mmd/websurfx)|✅|❌||
[⬅️ Go back to Home](./README.md)

src/cache/cacher.rs

@@ -4,6 +4,8 @@
use error_stack::Report;
#[cfg(feature = "memory-cache")]
use mini_moka::sync::Cache as MokaCache;
#[cfg(feature = "memory-cache")]
use mini_moka::sync::ConcurrentCacheExt;
#[cfg(feature = "memory-cache")]
use std::time::Duration;
@@ -61,8 +63,8 @@ pub trait Cacher: Send + Sync {
/// failure.
async fn cache_results(
&mut self,
search_results: &SearchResults,
url: &str,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>>;
/// A helper function which computes the hash of the url and formats and returns it as a string.
@@ -332,14 +334,33 @@ impl Cacher for RedisCache {
async fn cache_results(
&mut self,
search_results: &SearchResults,
url: &str,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>> {
use base64::Engine;
let bytes = self.pre_process_search_results(search_results)?;
let base64_string = base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes);
let hashed_url_string = self.hash_url(url);
self.cache_json(&base64_string, &hashed_url_string).await
// size of search_results is expected to be equal to size of urls -> key/value pairs for cache;
let search_results_len = search_results.len();
let mut bytes = Vec::with_capacity(search_results_len);
for result in search_results {
let processed = self.pre_process_search_results(result)?;
bytes.push(processed);
}
let base64_strings = bytes
.iter()
.map(|bytes_vec| base64::engine::general_purpose::STANDARD_NO_PAD.encode(bytes_vec));
let mut hashed_url_strings = Vec::with_capacity(search_results_len);
for url in urls {
let hash = self.hash_url(url);
hashed_url_strings.push(hash);
}
self.cache_json(base64_strings, hashed_url_strings.into_iter())
.await
}
}
/// TryInto implementation for SearchResults from Vec<u8>
@@ -391,12 +412,16 @@ impl Cacher for InMemoryCache {
async fn cache_results(
&mut self,
search_results: &SearchResults,
url: &str,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>> {
for (url, search_result) in urls.iter().zip(search_results.iter()) {
let hashed_url_string = self.hash_url(url);
let bytes = self.pre_process_search_results(search_results)?;
let bytes = self.pre_process_search_results(search_result)?;
self.cache.insert(hashed_url_string, bytes);
}
self.cache.sync();
Ok(())
}
}
@@ -434,11 +459,13 @@ impl Cacher for HybridCache {
async fn cache_results(
&mut self,
search_results: &SearchResults,
url: &str,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>> {
self.redis_cache.cache_results(search_results, url).await?;
self.memory_cache.cache_results(search_results, url).await?;
self.redis_cache.cache_results(search_results, urls).await?;
self.memory_cache
.cache_results(search_results, urls)
.await?;
Ok(())
}
@@ -460,8 +487,8 @@ impl Cacher for DisabledCache {
async fn cache_results(
&mut self,
_search_results: &SearchResults,
_url: &str,
_search_results: &[SearchResults],
_urls: &[String],
) -> Result<(), Report<CacheError>> {
Ok(())
}
@@ -519,11 +546,11 @@ impl SharedCache {
/// on a failure.
pub async fn cache_results(
&self,
search_results: &SearchResults,
url: &str,
search_results: &[SearchResults],
urls: &[String],
) -> Result<(), Report<CacheError>> {
let mut mut_cache = self.cache.lock().await;
mut_cache.cache_results(search_results, url).await
mut_cache.cache_results(search_results, urls).await
}
}
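
The signature change above turns `cache_results` from a single `(result, url)` write into a batched one: two parallel slices, where `urls[i]` is the cache key for `search_results[i]`. A minimal sketch of a caller, assuming the `SharedCache`, `SearchResults`, and `CacheError` types from this diff are in scope:

// Hypothetical helper; `shared_cache` is a websurfx `SharedCache` as defined above.
async fn store_pages(
    shared_cache: &SharedCache,
    pages: &[SearchResults],
    keys: &[String],
) -> Result<(), error_stack::Report<CacheError>> {
    // Both slices must line up index-for-index, as the comment in the
    // RedisCache implementation above expects.
    shared_cache.cache_results(pages, keys).await
}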

src/cache/redis_cacher.rs

@@ -118,14 +118,18 @@ impl RedisCache {
/// on a failure.
pub async fn cache_json(
&mut self,
json_results: &str,
key: &str,
json_results: impl Iterator<Item = String>,
keys: impl Iterator<Item = String>,
) -> Result<(), Report<CacheError>> {
self.current_connection = Default::default();
let mut pipeline = redis::Pipeline::with_capacity(3);
let mut result: Result<(), RedisError> = self.connection_pool
[self.current_connection as usize]
.set_ex(key, json_results, self.cache_ttl.into())
for (key, json_result) in keys.zip(json_results) {
pipeline.set_ex(key, json_result, self.cache_ttl.into());
}
let mut result: Result<(), RedisError> = pipeline
.query_async(&mut self.connection_pool[self.current_connection as usize])
.await;
// Code to check whether the current connection being used is dropped with connection error
@@ -145,8 +149,10 @@
CacheError::PoolExhaustionWithConnectionDropError,
));
}
result = self.connection_pool[self.current_connection as usize]
.set_ex(key, json_results, 60)
result = pipeline
.query_async(
&mut self.connection_pool[self.current_connection as usize],
)
.await;
continue;
}
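
The loop above queues one `SETEX` per key into a single `redis::Pipeline`, so caching a batch of pages costs one round trip instead of one per key. The same pattern in isolation (a sketch against the `redis` crate version pinned in `Cargo.toml`, with hypothetical key/value pairs):

use redis::{aio::ConnectionManager, Pipeline, RedisError};

// Write several key/value pairs with a shared TTL in one round trip.
async fn cache_batch(
    conn: &mut ConnectionManager,
    pairs: &[(String, String)],
    ttl_seconds: usize,
) -> Result<(), RedisError> {
    let mut pipeline = Pipeline::with_capacity(pairs.len());
    for (key, value) in pairs {
        // Commands are only queued here; nothing is sent until query_async runs.
        pipeline.set_ex(key, value, ttl_seconds);
    }
    pipeline.query_async(conn).await
}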

src/config/parser.rs

@@ -98,6 +98,7 @@ impl Config {
#[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
let parsed_cet = globals.get::<_, u16>("cache_expiry_time")?;
#[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
let cache_expiry_time = match parsed_cet {
0..=59 => {
log::error!(

src/models/engine_models.rs

@@ -86,6 +86,42 @@ pub trait SearchEngine: Sync + Send {
.change_context(EngineError::RequestError)?)
}
/// This helper function fetches/requests the json search results from the upstream search engine as a vector of bytes.
///
/// # Arguments
///
/// * `url` - It takes the url of the upstream search engine with the user requested search
/// query appended in the search parameters.
/// * `header_map` - It takes the http request headers to be sent to the upstream engine in
/// order to prevent being detected as a bot. It takes the header as a HeaderMap type.
/// * `client` - It takes the reqwest `Client` which is used to send the request to the
/// upstream search engine.
///
/// # Error
///
/// It returns the json data as a vector of bytes if the upstream engine provides the data as expected
/// otherwise it returns a custom `EngineError`.
async fn fetch_json_as_bytes_from_upstream(
&self,
url: &str,
header_map: reqwest::header::HeaderMap,
client: &Client,
) -> Result<Vec<u8>, EngineError> {
// fetch the json response from upstream search engine
Ok(client
.get(url)
.headers(header_map) // add spoofed headers to emulate human behavior
.send()
.await
.change_context(EngineError::RequestError)?
.bytes()
.await
.change_context(EngineError::RequestError)?
.to_vec())
}
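
Apart from returning `.bytes()` instead of text, this JSON helper mirrors the existing HTML fetcher. Reduced to a free function, the underlying reqwest pattern looks like this (a sketch, not code from this commit):

use reqwest::{header::HeaderMap, Client};

// Fetch `url` with the spoofed headers and return the raw response body.
async fn fetch_bytes(client: &Client, url: &str, headers: HeaderMap) -> reqwest::Result<Vec<u8>> {
    Ok(client
        .get(url)
        .headers(headers) // same bot-evasion headers as the trait helper
        .send()
        .await?
        .bytes()
        .await?
        .to_vec())
}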
/// This function scrapes results from the upstream engine and puts all the scraped results like
/// title, visiting_url (href in html), engine (from which engine it was fetched from) and description
/// in a RawSearchResult and then adds that to HashMap whose keys are url and values are RawSearchResult

src/models/server_models.rs

@@ -1,7 +1,11 @@
//! This module provides the models to parse cookies and search parameters from the search
//! engine website.
use std::borrow::Cow;
use serde::Deserialize;
use super::parser_models::Style;
/// A named struct which deserializes all the user provided search parameters and stores them.
#[derive(Deserialize)]
pub struct SearchParams {
@@ -21,11 +25,24 @@
#[derive(Deserialize)]
pub struct Cookie<'a> {
/// It stores the theme name used in the website.
pub theme: &'a str,
pub theme: Cow<'a, str>,
/// It stores the colorscheme name used for the website theme.
pub colorscheme: &'a str,
pub colorscheme: Cow<'a, str>,
/// It stores the user selected upstream search engines selected from the UI.
pub engines: Vec<&'a str>,
pub engines: Cow<'a, Vec<Cow<'a, str>>>,
/// It stores the user selected safe search level from the UI.
pub safe_search_level: u8,
}
impl<'a> Cookie<'a> {
/// server_models::Cookie constructor function
pub fn build(style: &'a Style, mut engines: Vec<Cow<'a, str>>, safe_search_level: u8) -> Self {
engines.sort();
Self {
theme: Cow::Borrowed(&style.theme),
colorscheme: Cow::Borrowed(&style.colorscheme),
engines: Cow::Owned(engines),
safe_search_level,
}
}
}
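
Moving the cookie fields to `Cow` lets `build` borrow the theme and colorscheme straight out of the server's `Style` while `serde_json` can still deserialize owned values from a request cookie. A small round-trip sketch (the struct mirrors the diff; the field values are made up):

use std::borrow::Cow;
use serde::Deserialize;

#[derive(Deserialize)]
struct Cookie<'a> {
    theme: Cow<'a, str>,
    colorscheme: Cow<'a, str>,
    engines: Cow<'a, Vec<Cow<'a, str>>>,
    safe_search_level: u8,
}

fn main() {
    let raw = r#"{"theme":"simple","colorscheme":"catppuccin-mocha","engines":["bing","brave"],"safe_search_level":2}"#;
    let cookie: Cookie<'_> = serde_json::from_str(raw).expect("well-formed cookie JSON");
    // Deserialization takes the owned path through Cow, which is exactly why
    // the same fields can also be borrowed when the cookie is built server-side.
    assert_eq!(cookie.theme, "simple");
    assert_eq!(cookie.engines.len(), 2);
    assert_eq!(cookie.safe_search_level, 2);
}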

src/server/routes/search.rs

@@ -6,14 +6,15 @@ use crate::{
handler::{file_path, FileType},
models::{
aggregation_models::SearchResults,
engine_models::{EngineError, EngineHandler},
server_models::{Cookie, SearchParams},
engine_models::EngineHandler,
server_models::{self, SearchParams},
},
results::aggregator::aggregate,
};
use actix_web::{get, http::header::ContentType, web, HttpRequest, HttpResponse};
use regex::Regex;
use std::{
borrow::Cow,
fs::File,
io::{BufRead, BufReader, Read},
};
@@ -39,6 +40,7 @@ pub async fn search(
config: web::Data<Config>,
cache: web::Data<SharedCache>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
use std::sync::Arc;
let params = web::Query::<SearchParams>::from_query(req.query_string())?;
match &params.q {
Some(query) => {
@@ -48,25 +50,80 @@
.finish());
}
let get_results = |page| {
results(
&config,
&cache,
query,
page,
req.clone(),
&params.safesearch,
let cookie = req.cookie("appCookie");
// Get search settings using the user's cookie or from the server's config
let mut search_settings: server_models::Cookie<'_> = cookie
.and_then(|cookie_value| serde_json::from_str(cookie_value.value()).ok())
.unwrap_or_else(|| {
server_models::Cookie::build(
&config.style,
config
.upstream_search_engines
.iter()
.filter_map(|(engine, enabled)| {
enabled.then_some(Cow::Borrowed(engine.as_str()))
})
.collect(),
config.safe_search,
)
};
});
search_settings.safe_search_level = get_safesearch_level(
&Some(search_settings.safe_search_level),
&params.safesearch,
config.safe_search,
);
// Closure wrapping the results function capturing local references
let get_results = |page| results(&config, &cache, query, page, &search_settings);
// .max(1) makes sure that the page >= 0.
let page = params.page.unwrap_or(1).max(1) - 1;
let previous_page = page.saturating_sub(1);
let next_page = page + 1;
let (_, results, _) = join!(
get_results(page.saturating_sub(1)),
let mut results = Arc::new((SearchResults::default(), String::default()));
if page != previous_page {
let (previous_results, current_results, next_results) = join!(
get_results(previous_page),
get_results(page),
get_results(page + 1)
get_results(next_page)
);
let (parsed_previous_results, parsed_next_results) =
(previous_results?, next_results?);
let (cache_keys, results_list) = (
[
parsed_previous_results.1,
results.1.clone(),
parsed_next_results.1,
],
[
parsed_previous_results.0,
results.0.clone(),
parsed_next_results.0,
],
);
results = Arc::new(current_results?);
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
} else {
let (current_results, next_results) =
join!(get_results(page), get_results(page + 1));
let parsed_next_results = next_results?;
results = Arc::new(current_results?);
let (cache_keys, results_list) = (
[results.1.clone(), parsed_next_results.1.clone()],
[results.0.clone(), parsed_next_results.0],
);
tokio::spawn(async move { cache.cache_results(&results_list, &cache_keys).await });
}
Ok(HttpResponse::Ok().content_type(ContentType::html()).body(
crate::templates::views::search::search(
@@ -74,7 +131,7 @@ pub async fn search(
&config.style.theme,
&config.style.animation,
query,
&results?,
&results.0,
)
.0,
))
@@ -105,25 +162,19 @@ async fn results(
cache: &web::Data<SharedCache>,
query: &str,
page: u32,
req: HttpRequest,
safe_search: &Option<u8>,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
search_settings: &server_models::Cookie<'_>,
) -> Result<(SearchResults, String), Box<dyn std::error::Error>> {
// eagerly parse cookie value to evaluate safe search level
let cookie_value = req.cookie("appCookie");
let cookie_value: Option<Cookie<'_>> = cookie_value
.as_ref()
.and_then(|cv| serde_json::from_str(cv.name_value().1).ok());
let safe_search_level = get_safesearch_level(
safe_search,
&cookie_value.as_ref().map(|cv| cv.safe_search_level),
config.safe_search,
);
let safe_search_level = search_settings.safe_search_level;
let cache_key = format!(
"http://{}:{}/search?q={}&page={}&safesearch={}",
config.binding_ip, config.port, query, page, safe_search_level
"http://{}:{}/search?q={}&page={}&safesearch={}&engines={}",
config.binding_ip,
config.port,
query,
page,
safe_search_level,
search_settings.engines.join(",")
);
// fetch the cached results json.
@@ -131,7 +182,7 @@
// check if fetched cache results was indeed fetched or it was an error and if so
// handle the data accordingly.
match cached_results {
Ok(results) => Ok(results),
Ok(results) => Ok((results, cache_key)),
Err(_) => {
if safe_search_level == 4 {
let mut results: SearchResults = SearchResults::default();
@@ -141,9 +192,11 @@
// Return early when query contains disallowed words,
if flag {
results.set_disallowed();
cache.cache_results(&results, &cache_key).await?;
cache
.cache_results(&[results.clone()], &[cache_key.clone()])
.await?;
results.set_safe_search_level(safe_search_level);
return Ok(results);
return Ok((results, cache_key));
}
}
@@ -151,22 +204,18 @@
// default selected upstream search engines from the config file otherwise
// parse the non-empty cookie and grab the user selected engines from the
// UI and use that.
let mut results: SearchResults = match cookie_value {
Some(cookie_value) => {
let engines: Vec<EngineHandler> = cookie_value
.engines
.iter()
.filter_map(|name| EngineHandler::new(name).ok())
.collect();
match engines.is_empty() {
let mut results: SearchResults = match search_settings.engines.is_empty() {
false => {
aggregate(
query,
page,
config.aggregator.random_delay,
config.debug,
&engines,
&search_settings
.engines
.iter()
.filter_map(|engine| EngineHandler::new(engine).ok())
.collect::<Vec<EngineHandler>>(),
config.request_timeout,
safe_search_level,
)
@@ -177,25 +226,6 @@
search_results.set_no_engines_selected();
search_results
}
}
}
None => aggregate(
query,
page,
config.aggregator.random_delay,
config.debug,
&config
.upstream_search_engines
.clone()
.into_iter()
.filter_map(|(key, value)| value.then_some(key))
.map(|engine| EngineHandler::new(&engine))
.collect::<Result<Vec<EngineHandler>, error_stack::Report<EngineError>>>(
)?,
config.request_timeout,
safe_search_level,
)
.await?,
};
if results.engine_errors_info().is_empty()
&& results.results().is_empty()
@@ -203,9 +233,11 @@
{
results.set_filtered();
}
cache.cache_results(&results, &cache_key).await?;
cache
.cache_results(&[results.clone()], &[cache_key.clone()])
.await?;
results.set_safe_search_level(safe_search_level);
Ok(results)
Ok((results, cache_key))
}
}
}
@@ -237,23 +269,24 @@ fn is_match_from_filter_list(
Ok(false)
}
/// A helper function which returns the safe search level based on the url params
/// and cookie value.
/// A helper function to modify the safe search level based on the url params.
/// The resulting level is the one from the user's cookie, or
/// the default set by the server config if the cookie is missing.
///
/// # Arguments
///
/// * `safe_search` - Safe search level from the url.
/// * `cookie` - User's cookie
/// * `default` - Safe search level to fall back to
fn get_safesearch_level(safe_search: &Option<u8>, cookie: &Option<u8>, default: u8) -> u8 {
match safe_search {
Some(ss) => {
if *ss >= 3 {
default
/// * `url_level` - Safe search level from the url.
/// * `cookie_level` - Safe search level from the user's cookie
/// * `config_level` - Safe search level to fall back to
fn get_safesearch_level(cookie_level: &Option<u8>, url_level: &Option<u8>, config_level: u8) -> u8 {
match url_level {
Some(url_level) => {
if *url_level >= 3 {
config_level
} else {
*ss
*url_level
}
}
None => cookie.unwrap_or(default),
None => cookie_level.unwrap_or(config_level),
}
}
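
Read as merged, the precedence is: a valid URL parameter wins, a URL parameter of 3 or above is rejected in favour of the config default, and otherwise the cookie level (or, failing that, the config default) applies. A quick sketch exercising the function above:

fn main() {
    // (cookie level, URL level, config default) -> effective level
    assert_eq!(get_safesearch_level(&Some(1), &Some(2), 0), 2); // URL param overrides the cookie
    assert_eq!(get_safesearch_level(&Some(1), &Some(4), 0), 0); // out-of-range URL falls back to config
    assert_eq!(get_safesearch_level(&Some(1), &None, 0), 1); // cookie wins when the URL has no param
    assert_eq!(get_safesearch_level(&None, &None, 3), 3); // config default as the last resort
}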