diff --git a/.cspell.json b/.cspell.json new file mode 100644 index 0000000..9d5ec5b --- /dev/null +++ b/.cspell.json @@ -0,0 +1,20 @@ +{ + "ignorePaths": [ + "**/node_modules/**", + "**/vscode-extension/**", + "**/.git/**", + "**/.pnpm-lock.json", + ".vscode", + "megalinter", + "package-lock.json", + "report" + ], + "language": "en", + "noConfigSearch": true, + "words": [ + "megalinter", + "oxsecurity", + "websurfx" + ], + "version": "0.2" +} diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000..e59bf93 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,40 @@ +name: 🐛 Bug +description: Report an issue to help improve the project. +title: "🐛 " +labels: ["🛠ī¸ goal: fix","đŸšĻ status: awaiting triage"] +body: + - type: textarea + id: description + attributes: + label: Description + description: A brief description of the question or issue, also include what you tried and what didn't work + validations: + required: true + - type: textarea + id: screenshots + attributes: + label: Screenshots + description: Please add screenshots if applicable + validations: + required: false + - type: dropdown + id: assignee + attributes: + label: Do you want to work on this issue? + multiple: false + options: + - "Yes" + - "No" + validations: + required: false + - type: textarea + id: extrainfo + attributes: + label: Additional information + description: Is there anything else we should know about this bug? 
+ validations: + required: false + - type: markdown + attributes: + value: | + You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 74f5722..0000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: "[BUG] " -labels: bug -assignees: '' - ---- - - - -**Version of Websurfx, commit number if you are using on master branch** - - -**How did you install Websurfx?** - - -**What happened?** - - -**Steps To Reproduce** - - -**Expected behavior** - - -**Screenshots** - - -**Additional context** - diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 0086358..97ba6bf 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1 +1,5 @@ -blank_issues_enabled: true +blank_issues_enabled: false +contact_links: + - name: Question? + url: https://discord.gg/SWnda7Mw5u + about: Feel free to ask your question by joining our Discord server. diff --git a/.github/ISSUE_TEMPLATE/docs.yml b/.github/ISSUE_TEMPLATE/docs.yml new file mode 100644 index 0000000..d8f1c83 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/docs.yml @@ -0,0 +1,40 @@ +name: 📝 Documentation issue +description: Found an issue in the documentation? You can use this one! +title: "📝 " +labels: ["📄 aspect: text","đŸšĻ status: awaiting triage"] +body: + - type: textarea + id: description + attributes: + label: Description + description: A brief description of the question or issue, also include what you tried and what didn't work + validations: + required: true + - type: textarea + id: screenshots + attributes: + label: Screenshots + description: Please add screenshots if applicable + validations: + required: false + - type: dropdown + id: assignee + attributes: + label: Do you want to work on this issue? 
+ multiple: false + options: + - "Yes" + - "No" + validations: + required: false + - type: textarea + id: extrainfo + attributes: + label: Additional information + description: Is there anything else we should know about this issue? + validations: + required: false + - type: markdown + attributes: + value: | + You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u) diff --git a/.github/ISSUE_TEMPLATE/engine-request.md b/.github/ISSUE_TEMPLATE/engine-request.md deleted file mode 100644 index faacb92..0000000 --- a/.github/ISSUE_TEMPLATE/engine-request.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: Engine request -about: 'Suggest a new engine to be add ' -title: "[ENGINE] " -labels: engine -assignees: '' - ---- - - - -**Working URL of the engine** - - -**Why do you want to add this engine?** - - -**Features of this engine** - - -**Applicable category of this engine** - - -**Additional context** - diff --git a/.github/ISSUE_TEMPLATE/engine.yml b/.github/ISSUE_TEMPLATE/engine.yml new file mode 100644 index 0000000..ee55d5e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/engine.yml @@ -0,0 +1,72 @@ +name: ✨ Engine +description: Have a new engine to suggest for Websurfx? Please suggest! +title: '✨ ' +labels: ['⭐ goal: addition', 'đŸšĻ status: awaiting triage'] +body: + - type: textarea + id: workingUrl + attributes: + label: Working URL of the engine + description: Please check if the engine is responding correctly before submitting it. + validations: + required: true + - type: textarea + id: reason + attributes: + label: Why do you want to add this engine? + description: What's special about this engine? Is it open source or libre? + validations: + required: true + - type: textarea + id: features + attributes: + label: Features of this engine + description: "Features of this engine: Doesn't track its users, fast, easy to integrate, or anything else that we can know about."
+ validations: + required: true + - type: textarea + id: screenshots + attributes: + label: Screenshots + description: Please add screenshots if applicable + validations: + required: false + - type: dropdown + id: category + attributes: + label: Applicable category of this engine + multiple: true + options: + - 'General' + - 'Files' + - 'Images' + - 'IT' + - 'Map' + - 'Music' + - 'News' + - 'Science' + - 'Social Media' + - 'Videos' + validations: + required: true + - type: dropdown + id: assignee + attributes: + label: Do you want to work on this issue? + multiple: false + options: + - 'Yes' + - 'No' + validations: + required: false + - type: textarea + id: extrainfo + attributes: + label: Additional information + description: Is there anything else we should know about this idea? + validations: + required: false + - type: markdown + attributes: + value: | + You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u) diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 0000000..8302bbf --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,40 @@ +name: 💡 General Feature Request +description: Have a new idea/feature for Websurfx? Please suggest! +title: "✨ " +labels: ["⭐ goal: addition", "đŸšĻ status: awaiting triage"] +body: + - type: textarea + id: description + attributes: + label: Description + description: A brief description of the enhancement you propose, also include what you tried and what worked. + validations: + required: true + - type: textarea + id: screenshots + attributes: + label: Screenshots + description: Please add screenshots if applicable + validations: + required: false + - type: dropdown + id: assignee + attributes: + label: Do you want to work on this issue?
+ multiple: false + options: + - "Yes" + - "No" + validations: + required: false + - type: textarea + id: extrainfo + attributes: + label: Additional information + description: Is there anything else we should know about this idea? + validations: + required: false + - type: markdown + attributes: + value: | + You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 06c1ee5..0000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: "[FEATURE] " -labels: enhancement -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/other.yml b/.github/ISSUE_TEMPLATE/other.yml new file mode 100644 index 0000000..a3d626c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/other.yml @@ -0,0 +1,36 @@ +name: 🧱 Other +description: Use this for any other issues. Please do NOT create blank issues +title: "🧱 " +labels: ["đŸšĻ status: awaiting triage"] +body: + - type: markdown + attributes: + value: "# Other issue" + - type: textarea + id: issuedescription + attributes: + label: What would you like to share? + description: Provide a clear and concise explanation of your issue. + validations: + required: true + - type: dropdown + id: assignee + attributes: + label: Do you want to work on this issue? 
+ multiple: false + options: + - "Yes" + - "No" + validations: + required: false + - type: textarea + id: extrainfo + attributes: + label: Additional information + description: Is there anything else we should know about this issue? + validations: + required: false + - type: markdown + attributes: + value: | + You can also join our Discord community [here](https://discord.gg/SWnda7Mw5u) diff --git a/.github/label-actions.yml b/.github/label-actions.yml new file mode 100644 index 0000000..4729315 --- /dev/null +++ b/.github/label-actions.yml @@ -0,0 +1,11 @@ +"đŸšĻ status: awaiting triage": + issues: + comment: > + To reduce notifications, issues are locked until they are https://github.com/neon-mmd/websurfx/labels/%F0%9F%8F%81%20status%3A%20ready%20for%20dev and to be assigned. You can learn more in our contributing guide https://github.com/neon-mmd/websurfx/blob/rolling/CONTRIBUTING.md + lock: true + +"🏁 status: ready for dev": + issues: + comment: > + The issue has been unlocked and is now ready for dev. If you would like to work on this issue, you can comment to have it assigned to you. 
You can learn more in our contributing guide https://github.com/neon-mmd/websurfx/blob/rolling/CONTRIBUTING.md + unlock: true diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..cb9d0fb --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,27 @@ +'đŸ’ģ aspect: code': +- src/* +- Cargo.toml +- Cargo.lock +- Dockerfile +- docker-compose.yml +- websurfx/* + +'🤖 aspect: dx': +- '**/*.json' +- .dockerignore +- .gitignore +- .gitpod.Dockerfile +- .gitpod.yml +- .rusty-hook.toml +- PULL_REQUEST_TEMPLATE.md +- SECURITY.md +- .github/* +- .mega-linter.yml +- tests/* + +'📄 aspect: text': +- any: ['**/*.md', '!PULL_REQUEST_TEMPLATE.md', '!SECURITY.md'] +- LICENSE + +'🕹ī¸ aspect: interface': +- public/* diff --git a/.github/workflows/contributors.yml b/.github/workflows/contributors.yml new file mode 100644 index 0000000..d57a508 --- /dev/null +++ b/.github/workflows/contributors.yml @@ -0,0 +1,48 @@ +--- +name: Contributors List + +on: + workflow_dispatch: + + schedule: + - cron: "0 1 * * *" + +jobs: + contributors: + permissions: + contents: write + pull-requests: write + + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0 + with: + fetch-depth: 0 + ref: ${{ github.event.repository.default_branch }} + + - name: Update contributors list + uses: wow-actions/contributors-list@b9e91f91a51a55460fdcae64daad0cb8122cdd53 # v1.1.0 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + svgPath: images/contributors_list.svg + round: true + includeBots: false + noCommit: true + + - name: Commit & PR + uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # v4.2.4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + add-paths: .github/assets/CONTRIBUTORS.svg + commit-message: 'chore: update contributors-list' + committer: GitHub + author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> + signoff: false + branch: 
workflow/update-contributors-list + base: main + delete-branch: true + title: 'chore: update contributors-list' + body: | + Automated update to `images/contributors_list.svg` diff --git a/.github/workflows/hello.yml b/.github/workflows/hello.yml index 9d3018e..c10105b 100644 --- a/.github/workflows/hello.yml +++ b/.github/workflows/hello.yml @@ -1,3 +1,4 @@ +--- name: Welcome first time contributors on: diff --git a/.github/workflows/issue-lock-unlock.yml b/.github/workflows/issue-lock-unlock.yml new file mode 100644 index 0000000..29fb3f5 --- /dev/null +++ b/.github/workflows/issue-lock-unlock.yml @@ -0,0 +1,16 @@ +name: "lock/unlock issue" + +on: + issues: + types: labeled + +permissions: + issues: write + +jobs: + action: + runs-on: ubuntu-latest + steps: + - uses: dessant/label-actions@v3 + with: + process-only: issues diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index c8adbf7..ab9f2fe 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -1,9 +1,10 @@ +--- name: Import open source standard labels on: push: branches: - - master + - rolling jobs: labels: diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml new file mode 100644 index 0000000..c8f6cec --- /dev/null +++ b/.github/workflows/mega-linter.yml @@ -0,0 +1,89 @@ +--- +# MegaLinter GitHub Action configuration file +# More info at https://megalinter.io +name: MegaLinter + +on: + # Trigger mega-linter at every push. 
Action will also be visible from Pull Requests to rolling + push: # Comment this line to trigger action only on pull-requests (not recommended if you don't pay for GH Actions) + pull_request: + branches: [rolling] + +env: # Comment env block if you do not want to apply fixes + # Apply linter fixes configuration + APPLY_FIXES: all # When active, APPLY_FIXES must also be defined as environment variable (in github/workflows/mega-linter.yml or other CI tool) + APPLY_FIXES_EVENT: pull_request # Decide which event triggers application of fixes in a commit or a PR (pull_request, push, all) + APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request) + +concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + build: + name: MegaLinter + runs-on: ubuntu-latest + permissions: + # Give the default GITHUB_TOKEN write permission to commit and push, comment issues & post new PR + # Remove the ones you do not need + contents: write + issues: write + pull-requests: write + steps: + # Git Checkout + - name: Checkout Code + uses: actions/checkout@v3 + with: + token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + + # MegaLinter + - name: MegaLinter + id: ml + # You can override MegaLinter flavor used to have faster performances + # More info at https://megalinter.io/flavors/ + uses: oxsecurity/megalinter/flavors/cupcake@v7.1.0 + env: + # All available variables are described in documentation + # https://megalinter.io/configuration/ + VALIDATE_ALL_CODEBASE: true # Set ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} to validate only diff with main branch + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY + + # Upload MegaLinter artifacts + - name: Archive production artifacts + if: ${{ success() }} || ${{ failure() }} + uses: 
actions/upload-artifact@v3 + with: + name: MegaLinter reports + path: | + megalinter-reports + mega-linter.log + + # Create pull request if applicable (for now works only on PR from same repository, not from forks) + - name: Create Pull Request with applied fixes + id: cpr + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix') + uses: peter-evans/create-pull-request@v5 + with: + token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + commit-message: "[MegaLinter] Apply linters automatic fixes" + title: "[MegaLinter] Apply linters automatic fixes" + labels: bot + - name: Create PR output + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix') + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" + + # Push new commit if applicable (for now works only on PR from same repository, not from forks) + - name: Prepare commit + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix') + run: sudo chown -Rc $UID .git/ + - name: Commit and push applied linter fixes + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT 
== 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) && !contains(github.event.head_commit.message, 'skip fix') + uses: stefanzweifel/git-auto-commit-action@v4 + with: + branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }} + commit_message: "[MegaLinter] Apply linters fixes" + commit_user_name: megalinter-bot + commit_user_email: nicolas.vuillamy@ox.security diff --git a/.github/workflows/pr_labeler.yml b/.github/workflows/pr_labeler.yml new file mode 100644 index 0000000..bc7e72b --- /dev/null +++ b/.github/workflows/pr_labeler.yml @@ -0,0 +1,15 @@ +name: "Pull Request Auto Labeler" +on: +- pull_request_target + +jobs: + triage: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v4 + with: + sync-labels: true + dot: true diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml deleted file mode 100644 index 8e1e46b..0000000 --- a/.github/workflows/releases.yml +++ /dev/null @@ -1,78 +0,0 @@ -name: Releases -on: - push: - branches: - - "rolling" - -concurrency: - group: "rolling-branch" - -jobs: - changelog: - if: github.repository == 'neon-mmd/websurfx' - runs-on: ubuntu-latest - - steps: - # Create a temporary, uniquely named branch to push release info to - - name: create temporary branch - uses: peterjgrainger/action-create-branch@v2.3.0 - id: create-branch - with: - branch: "release-from-${{ github.sha }}" - sha: "${{ github.sha }}" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # check out the repository afterwards - - uses: actions/checkout@v3 - - # fetch branches and switch to the temporary branch - - name: switch to new branch - run: git fetch --all && git checkout --track origin/release-from-${{ github.sha }} - - # update app config with version - - name: Get 
current rust app version from its Cargo.toml. - id: foo - uses: dante-signal31/rust-app-version@v1.2.0 - with: - cargo_toml_folder: rust_app_folder/ - - - name: Use the version to update the Cargo.toml version. - shell: bash - run: sed -i "3s/version = \"[0-9]*.[0-9]*.[0-9]*\"/version = \"${{ steps.foo.outputs.app_version }}\"/g" Cargo.toml - - # create release info and push it upstream - - name: conventional Changelog Action - id: changelog - uses: TriPSs/conventional-changelog-action@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - version-file: "./Cargo.toml" - git-branch: "release-from-${{ github.sha }}" - skip-on-empty: false - skip-git-pull: true - - # create PR using GitHub CLI - - name: create PR with release info - id: create-pr - run: gh pr create --base main --head release-from-${{ github.sha }} --title 'Merge new release into rolling' --body 'Created by Github action' - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # merge PR using GitHub CLI - - name: merge PR with release info - id: merge-pr - run: gh pr merge --admin --merge --subject 'Merge release info' --delete-branch - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # release info is now in main so we can continue as before - - name: create release with last commit - uses: actions/create-release@v1 - if: steps.changelog.outputs.skipped == 'false' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ steps.changelog.outputs.tag }} - release_name: ${{ steps.changelog.outputs.tag }} - body: ${{ steps.changelog.outputs.clean_changelog }} diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index bc62ede..5d538ea 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -1,12 +1,13 @@ +--- name: Rust on: push: branches: - - "**" + - '**' pull_request: branches: - - "rolling" + - 'rolling' env: CARGO_TERM_COLOR: always @@ -20,23 +21,27 @@ jobs: - stable steps: - - uses: actions/checkout@v3 - - run: rustup toolchain install stable 
--profile minimal - - uses: Swatinem/rust-cache@v2 - with: - prefix-key: "" - shared-key: "" - key: "" - env-vars: "" - workspaces: "" - cache-directories: "" - cache-targets: "" - cache-on-failure: "" - cache-all-crates: "" - save-if: "" - - uses: actions/checkout@v3 - - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} - - name: Build - run: cargo build --verbose - - name: Run tests - run: cargo test --verbose + - name: Install LuaJIT and Lua + run: | + sudo apt-get update + sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev + - uses: actions/checkout@v3 + - run: rustup toolchain install stable --profile minimal + - uses: Swatinem/rust-cache@v2 + with: + prefix-key: '' + shared-key: '' + key: '' + env-vars: '' + workspaces: '' + cache-directories: '' + cache-targets: '' + cache-on-failure: '' + cache-all-crates: '' + save-if: '' + - uses: actions/checkout@v3 + - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} + - name: Build + run: cargo build --verbose + - name: Run tests + run: cargo test --verbose diff --git a/.github/workflows/rust_format.yml b/.github/workflows/rust_format.yml index d865c8c..1c1e16e 100644 --- a/.github/workflows/rust_format.yml +++ b/.github/workflows/rust_format.yml @@ -1,3 +1,4 @@ +--- name: Rust format and clippy checks on: push: @@ -12,6 +13,10 @@ jobs: name: Rust project runs-on: ubuntu-latest steps: + - name: Install LuaJIT and Lua + run: | + sudo apt-get update + sudo apt-get install -y --no-install-recommends liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev - uses: actions/checkout@v2 - name: Install minimal stable with clippy and rustfmt uses: actions-rs/toolchain@v1 @@ -19,7 +24,16 @@ jobs: profile: minimal toolchain: stable components: rustfmt, clippy - + - name: Format + uses: actions-rs/cargo@v1 + with: + command: fmt + args: -- --check + - name: Clippy + 
uses: actions-rs/cargo@v1 + with: + command: clippy + args: --all-targets --all - name: Run cargo check uses: actions-rs/cargo@v1 with: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 1e1e6f0..5bae815 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,3 +1,4 @@ +--- # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time. # # You can adjust the behavior by modifying this file. diff --git a/.gitignore b/.gitignore index ea8c4bf..2ab2745 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,8 @@ +.vscode /target +dhat-heap.json +dump.rdb +megalinter-reports/ +package-lock.json +package.json +result \ No newline at end of file diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile new file mode 100644 index 0000000..f64d765 --- /dev/null +++ b/.gitpod.Dockerfile @@ -0,0 +1,3 @@ +FROM gitpod/workspace-rust + +RUN sudo install-packages redis-server nodejs npm liblua5.4-dev liblua5.3-dev liblua5.2-dev liblua5.1-0-dev libluajit-5.1-dev diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 0000000..b3c8ea0 --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,50 @@ +--- +image: + file: .gitpod.Dockerfile + +# Commands that will run on workspace start +tasks: + - name: Start Redis Server + command: redis-server --port 8082 + - name: Run The App + init: cargo build + command: PKG_ENV=dev ./target/debug/websurfx + - name: Tests + command: cargo test + - name: Clippy Checks + command: cargo clippy + +# vscode IDE setup +vscode: + extensions: + - vadimcn.vscode-lldb + - cschleiden.vscode-github-actions + - rust-lang.rust-analyzer + - bungcip.better-toml + - serayuzgur.crates + - usernamehw.errorlens + - DavidAnson.vscode-markdownlint + - esbenp.prettier-vscode + - stylelint.vscode-stylelint + - dbaeumer.vscode-eslint + - evgeniypeshkov.syntax-highlighter + - ms-azuretools.vscode-docker + - Catppuccin.catppuccin-vsc + - PKief.material-icon-theme + - oderwat.indent-rainbow 
+ - formulahendry.auto-rename-tag + - swellaby.vscode-rust-test-adapter + - belfz.search-crates-io + - hbenl.test-adapter-converter + - hbenl.vscode-test-explorer + - eamodio.gitlens + +github: + prebuilds: + master: true + branches: true + pullRequests: true + pullRequestsFromForks: true + addCheck: true + addComment: false + addBadge: true diff --git a/.mega-linter.yml b/.mega-linter.yml new file mode 100644 index 0000000..b117b53 --- /dev/null +++ b/.mega-linter.yml @@ -0,0 +1,22 @@ +--- +# Configuration file for MegaLinter +# See all available variables at https://megalinter.io/configuration/ and in linters documentation + +APPLY_FIXES: all # all, none, or list of linter keys +# ENABLE: # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default +ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default + - RUST_CLIPPY + - JAVASCRIPT_ES + - CSS_STYLELINT + - MARKDOWN_MARKDOWNLINT + - YAML_YAMLLINT + - HTML_DJLINT + - ACTION_ACTIONLINT + - DOCKERFILE_HADOLINT + - SPELL_CSPELL +# DISABLE: + # - COPYPASTE # Uncomment to disable checks of excessive copy-pastes + # - SPELL # Uncomment to disable checks of spelling mistakes +SHOW_ELAPSED_TIME: true +FILEIO_REPORTER: false +# DISABLE_ERRORS: true # Uncomment if you want MegaLinter to detect errors but not block CI to pass diff --git a/.rusty-hook.toml b/.rusty-hook.toml new file mode 100644 index 0000000..105b318 --- /dev/null +++ b/.rusty-hook.toml @@ -0,0 +1,5 @@ +[hooks] +pre-commit = "cargo test && cargo fmt -- --check && cargo clippy && stylelint ./public/static/themes/*.css ./public/static/colorschemes/*.css ./public/static/*.js" + +[logging] +verbose = true diff --git a/.stylelintrc.json b/.stylelintrc.json new file mode 100644 index 0000000..9019f4f --- /dev/null +++ b/.stylelintrc.json @@ -0,0 +1,13 @@ +{ + "extends": "stylelint-config-standard", + "rules": { + "alpha-value-notation": "number", + "selector-class-pattern": null 
+ }, +"overrides": [ + { + "files": ["*.js"], + "customSyntax": "postcss-lit" + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d630e2a..41ff84e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,7 @@ ## Documentation/Wiki -Found a typo, or something that isn't as clear as it could be? Maybe I've missed something off altogether, or you hit a roadblock that took you a while to figure out. Edit the [wiki](https://github.com/neon-mmd/websurfx/wiki) to add to or improve the documentation. This will help future users get Websurfx up and running more easily. +Found a typo, or something that isn't as clear as it could be? Maybe I've missed something off altogether, or you hit a roadblock that took you a while to figure out. Edit the [docs](./docs/) to add to or improve the documentation. This will help future users get Websurfx up and running more easily. ## Readme @@ -14,7 +14,7 @@ Know how to fix or improve a github action?. Consider Submitting a Pull request ## Source Code -You should know atleast one of the things below to start contributing: +You should know at least one of the things below to start contributing: - Rust basics - Actix-web crate basics @@ -48,23 +48,7 @@ We have a [Discord](https://discord.gg/SWnda7Mw5u) channel, feel free to join an # Where To Contribute? -## For Source Code Contributions - -The _rolling branch_ is where we intend all source code contributions should go. - -## For Readme Contributions - -The _master branch_ is where we intend all source code contributions should go. - -# How To Fork - -![image](./images/fork_button.png) - -![image](./images/fork_options_page.png) - -Please make sure to leave the `Copy the master branch only` option ticked off. - -![image](./images/create_fork_button.png) +The _rolling branch_ is where we intend all contributions should go. We appreciate any contributions whether be of any size or topic and suggestions to help improve the Websurfx project. 
Please keep in mind the above requirements and guidelines before submitting a pull request and also if you have any doubts/concerns/questions about the project, its source code or anything related to the project than feel free to ask by opening an [issue](https://github.com/neon-mmd/websurfx/issues) or by asking us on our [Discord](https://discord.gg/SWnda7Mw5u) channel. diff --git a/Cargo.lock b/Cargo.lock index 85310e9..c4ed1e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,19 +4,34 @@ version = 3 [[package]] name = "actix-codec" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a7559404a7f3573127aab53c08ce37a6c6a315c374a31070f3c91cd1b4a7fe" +checksum = "617a8268e3537fe1d8c9ead925fca49ef6400927ee7bc26750e90ecee14ce4b8" dependencies = [ - "bitflags", - "bytes 1.4.0", + "bitflags 1.3.2", + "bytes 1.5.0", "futures-core", "futures-sink", - "log", "memchr", "pin-project-lite", - "tokio 1.28.0", + "tokio 1.32.0", "tokio-util", + "tracing", +] + +[[package]] +name = "actix-cors" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b340e9cfa5b08690aae90fb61beb44e9b06f44fe3d0f93781aaa58cfba86245e" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec 1.11.1", ] [[package]] @@ -30,52 +45,64 @@ dependencies = [ "actix-utils", "actix-web", "askama_escape", - "bitflags", - "bytes 1.4.0", + "bitflags 1.3.2", + "bytes 1.5.0", "derive_more", "futures-core", "http-range", "log", "mime", "mime_guess", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite", ] [[package]] -name = "actix-http" -version = "3.3.1" +name = "actix-governor" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2079246596c18b4a33e274ae10c0e50613f4d32a4198e09c7b93771013fed74" +checksum = "46ff2d40f2bc627b8054c5e20fa6b0b0cf9428699b54bd41634e9ae3098ad555" +dependencies 
= [ + "actix-http", + "actix-web", + "futures 0.3.28", + "governor", +] + +[[package]] +name = "actix-http" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92ef85799cba03f76e4f7c10f533e66d87c9a7e7055f3391f09000ad8351bc9" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-utils", - "ahash 0.8.3", - "base64 0.21.0", - "bitflags", + "ahash", + "base64 0.21.4", + "bitflags 2.4.0", "brotli", - "bytes 1.4.0", + "bytes 1.5.0", "bytestring", "derive_more", "encoding_rs", "flate2", "futures-core", - "h2 0.3.18", + "h2 0.3.21", "http 0.2.9", "httparse", "httpdate", - "itoa 1.0.6", + "itoa 1.0.9", "language-tags", "local-channel", "mime", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite", "rand 0.8.5", "sha1", - "smallvec 1.10.0", - "tokio 1.28.0", + "smallvec 1.11.1", + "tokio 1.32.0", "tokio-util", "tracing", "zstd", @@ -83,12 +110,12 @@ dependencies = [ [[package]] name = "actix-macros" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ - "quote 1.0.27", - "syn 1.0.109", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] @@ -106,29 +133,28 @@ dependencies = [ [[package]] name = "actix-rt" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" +checksum = "28f32d40287d3f402ae0028a9d54bef51af15c8769492826a69d28f81893151d" dependencies = [ "futures-core", - "tokio 1.28.0", + "tokio 1.32.0", ] [[package]] name = "actix-server" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e8613a75dd50cc45f473cee3c34d59ed677c0f7b44480ce3b8247d7dc519327" +checksum = 
"3eb13e7eef0423ea6eab0e59f6c72e7cb46d33691ad56a726b3cd07ddec2c2d4" dependencies = [ "actix-rt", "actix-service", "actix-utils", "futures-core", "futures-util", - "mio 0.8.6", - "num_cpus", - "socket2", - "tokio 1.28.0", + "mio 0.8.8", + "socket2 0.5.4", + "tokio 1.32.0", "tracing", ] @@ -155,9 +181,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.3.1" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3cb42f9566ab176e1ef0b8b3a896529062b4efc6be0123046095914c4c1c96" +checksum = "0e4a5b5e29603ca8c94a77c65cf874718ceb60292c5a5c3e5f4ace041af462b9" dependencies = [ "actix-codec", "actix-http", @@ -168,8 +194,8 @@ dependencies = [ "actix-service", "actix-utils", "actix-web-codegen", - "ahash 0.7.6", - "bytes 1.4.0", + "ahash", + "bytes 1.5.0", "bytestring", "cfg-if 1.0.0", "cookie 0.16.2", @@ -177,8 +203,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "http 0.2.9", - "itoa 1.0.6", + "itoa 1.0.9", "language-tags", "log", "mime", @@ -188,29 +213,29 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded 0.7.1", - "smallvec 1.10.0", - "socket2", - "time 0.3.21", - "url 2.3.1", + "smallvec 1.11.1", + "socket2 0.5.4", + "time 0.3.29", + "url 2.4.1", ] [[package]] name = "actix-web-codegen" -version = "4.2.0" +version = "4.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2262160a7ae29e3415554a3f1fc04c764b1540c116aa524683208078b7a75bc9" +checksum = "eb1f50ebbb30eca122b188319a4398b3f7bb4a8cdf50ecfb73bfc6a3c3ce54f5" dependencies = [ "actix-router", - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 1.0.109", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] name = "addr2line" -version = "0.19.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" 
dependencies = [ "gimli", ] @@ -221,17 +246,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom 0.2.9", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.3" @@ -239,16 +253,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if 1.0.0", - "getrandom 0.2.9", + "getrandom", "once_cell", "version_check", ] [[package]] name = "aho-corasick" -version = "1.0.1" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" dependencies = [ "memchr", ] @@ -268,12 +282,53 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstyle" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" + +[[package]] +name = "anyhow" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" + +[[package]] +name = "arc-swap" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" + [[package]] name = "askama_escape" 
version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" +[[package]] +name = "async-once-cell" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9338790e78aa95a416786ec8389546c4b6a1dfc3dc36071ed9518a9413a542eb" + +[[package]] +name = "async-trait" +version = "0.1.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +dependencies = [ + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", +] + [[package]] name = "autocfg" version = "0.1.8" @@ -291,15 +346,15 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", "cfg-if 1.0.0", "libc", - "miniz_oxide 0.6.2", + "miniz_oxide", "object", "rustc-demangle", ] @@ -315,9 +370,9 @@ dependencies = [ [[package]] name = "base64" -version = "0.21.0" +version = "0.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" +checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" [[package]] name = "bit-set" @@ -340,6 +395,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + [[package]] name = "block-buffer" version = 
"0.10.4" @@ -351,9 +412,9 @@ dependencies = [ [[package]] name = "brotli" -version = "3.3.4" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -362,9 +423,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "da74e2b81409b1b743f8f0c62cc6254afefb8b8e50bbfe3735550f7aeefa3448" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -381,9 +442,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.12.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "bytecount" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" [[package]] name = "byteorder" @@ -404,9 +471,9 @@ dependencies = [ [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "bytestring" @@ -414,16 +481,54 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "238e4886760d98c4f899360c834fa93e62cf7f721ac3c2da375cbdf4b8679aae" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", ] [[package]] -name = "cc" -version = "1.0.79" +name = "camino" 
+version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cfa25e60aea747ec7e1124f238816749faa93759c6ff5b31f1ccdda137f4479" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.19", + "serde", + "serde_json", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", + "libc", ] [[package]] @@ -438,13 +543,74 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "ci_info" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24f638c70e8c5753795cc9a8c07c44da91554a09e4cf11a7326e8161b0a3c45e" +dependencies = [ + "envmnt", +] + +[[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clap" +version = "4.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" + [[package]] name = "cloudabi" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -453,8 +619,12 @@ version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", + "futures-core", "memchr", + "pin-project-lite", + "tokio 1.32.0", + "tokio-util", ] [[package]] @@ -469,7 +639,7 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "888604f00b3db336d2af898ec3c1d5d0ddf5e6d462220f2ededc33a87ac4bbd5" dependencies = [ - "time 0.1.45", + "time 0.1.43", "url 1.7.2", ] @@ -479,8 +649,8 @@ version = "0.16.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ - "percent-encoding 2.2.0", - "time 0.3.21", + "percent-encoding 2.3.0", + "time 0.3.29", "version_check", ] @@ -497,7 +667,7 @@ dependencies = [ "publicsuffix", "serde", "serde_json", - "time 0.1.45", + "time 0.1.43", "try_from", "url 1.7.2", ] @@ -520,9 +690,9 @@ checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -536,17 +706,74 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.16", +] + [[package]] name = "crossbeam-deque" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", "maybe-uninit", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch 0.9.15", + "crossbeam-utils 0.8.16", +] + [[package]] name = "crossbeam-epoch" version = "0.8.2" @@ -555,10 +782,23 @@ checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" dependencies = [ "autocfg 1.1.0", "cfg-if 0.1.10", - "crossbeam-utils", + "crossbeam-utils 0.7.2", "lazy_static", "maybe-uninit", - "memoffset", + "memoffset 0.5.6", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.16", + "memoffset 0.9.0", "scopeguard", ] @@ -569,7 +809,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" dependencies = [ "cfg-if 0.1.10", - "crossbeam-utils", + "crossbeam-utils 0.7.2", "maybe-uninit", ] @@ -584,6 +824,15 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -596,31 +845,46 @@ dependencies = [ [[package]] name = "cssparser" -version = "0.29.6" +version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f93d03419cb5950ccfd3daf3ff1c7a36ace64609a1a8746d493df1ca0afde0fa" +checksum = "5b3df4f93e5fbbe73ec01ec8d3f68bba73107993a5b1e7519273c32db9b0d5be" dependencies = [ "cssparser-macros", "dtoa-short", - "itoa 1.0.6", - "matches", - "phf 0.10.1", - "proc-macro2 1.0.56", - "quote 1.0.27", - "smallvec 1.10.0", - "syn 1.0.109", + "itoa 1.0.9", + "phf 0.11.2", + "smallvec 1.11.1", ] [[package]] name = "cssparser-macros" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ - "quote 1.0.27", - "syn 1.0.109", + "quote 1.0.33", + "syn 2.0.37", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown 0.14.1", + "lock_api 0.4.10", + "once_cell", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "deranged" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" + [[package]] name = "derive_more" version = "0.99.17" @@ -628,17 +892,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "rustc_version 0.4.0", "syn 1.0.109", ] [[package]] -name = "digest" -version = "0.10.6" +name = "dhat" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +checksum = "4f2aaf837aaf456f6706cb46386ba8dffd4013a757e36f4ea05c20dd46b209a3" +dependencies = [ + "backtrace", + 
"lazy_static", + "mintex", + "parking_lot 0.12.1", + "rustc-hash", + "serde", + "serde_json", + "thousands", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", @@ -651,12 +931,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" [[package]] -name = "dtoa-short" -version = "0.3.3" +name = "dtoa" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bde03329ae10e79ede66c9ce4dc930aa8599043b0743008548680f25b91502d6" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + +[[package]] +name = "dtoa-short" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" dependencies = [ - "dtoa", + "dtoa 1.0.9", ] [[package]] @@ -667,15 +953,15 @@ checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591" [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encoding_rs" -version = "0.8.32" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if 1.0.0", ] @@ -694,14 +980,24 @@ dependencies = [ ] [[package]] -name = "errno" -version = "0.3.1" +name = "envmnt" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "a2d328fc287c61314c4a61af7cfdcbd7e678e39778488c7cb13ec133ce0f4059" +dependencies = [ + "fsio", + "indexmap", +] + +[[package]] +name = "errno" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136526188508e25c6fef639d7927dfb3e0e3084488bf202267829cf7fc23dbdd" dependencies = [ "errno-dragonfly", "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -714,6 +1010,25 @@ dependencies = [ "libc", ] +[[package]] +name = "error-chain" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" +dependencies = [ + "version_check", +] + +[[package]] +name = "error-stack" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27a72baa257b5e0e2de241967bc5ee8f855d6072351042688621081d66b2a76b" +dependencies = [ + "anyhow", + "rustc_version 0.4.0", +] + [[package]] name = "failure" version = "0.1.8" @@ -730,8 +1045,8 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "syn 1.0.109", "synstructure", ] @@ -749,21 +1064,18 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = 
"c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", - "miniz_oxide 0.7.1", + "miniz_oxide", ] [[package]] @@ -789,13 +1101,19 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", ] +[[package]] +name = "fsio" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1fd087255f739f4f1aeea69f11b72f8080e9c2e7645cd06955dad4a178a49e3" + [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -808,7 +1126,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" dependencies = [ - "bitflags", + "bitflags 1.3.2", "fuchsia-zircon-sys", ] @@ -834,6 +1152,21 @@ version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.28" @@ -841,6 +1174,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -855,10 +1189,38 @@ version = "0.1.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" dependencies = [ - "futures", + "futures 0.1.31", "num_cpus", ] +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", +] + [[package]] name = "futures-sink" version = "0.3.28" @@ -871,16 +1233,28 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + [[package]] name = "futures-util" version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ + "futures-channel", "futures-core", + "futures-io", + "futures-macro", + "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", + "slab", ] [[package]] @@ -913,20 +1287,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.1.16" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - -[[package]] -name = "getrandom" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if 1.0.0", "libc", @@ -935,9 +1298,33 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "governor" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c390a940a5d157878dd057c78680a33ce3415bcd05b4799509ea44210914b4d5" +dependencies = [ + "cfg-if 1.0.0", + "dashmap", + "futures 0.3.28", + "futures-timer", + "no-std-compat", + "nonzero_ext", + "parking_lot 0.12.1", + "quanta", + "rand 0.8.5", + "smallvec 1.11.1", +] [[package]] name = "h2" @@ -948,7 +1335,7 @@ dependencies = [ "byteorder", "bytes 0.4.12", "fnv", - "futures", + "futures 0.1.31", "http 0.1.21", "indexmap", "log", @@ -959,11 +1346,11 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.18" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f8a914c2987b688368b5138aa05321db91f4090cf26118185672ad588bce21" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", "futures-core", "futures-sink", @@ -971,16 +1358,22 @@ 
dependencies = [ "http 0.2.9", "indexmap", "slab", - "tokio 1.28.0", + "tokio 1.32.0", "tokio-util", "tracing", ] [[package]] -name = "handlebars" -version = "4.3.6" +name = "half" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "handlebars" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39b3bc2a8f715298032cf5087e58573809374b08160aa7d750582bdb82d2683" dependencies = [ "log", "pest", @@ -998,19 +1391,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] -name = "hermit-abi" -version = "0.2.6" +name = "hashbrown" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" [[package]] name = "hermit-abi" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys", +] [[package]] name = "html5ever" @@ -1035,8 +1434,8 @@ dependencies = [ "log", "mac", "markup5ever 0.11.0", - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "syn 1.0.109", ] @@ -1057,9 +1456,9 @@ version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", - "itoa 1.0.6", + "itoa 1.0.9", ] [[package]] @@ -1069,7 +1468,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "http 0.1.21", "tokio-buf", ] @@ -1080,7 +1479,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "http 0.2.9", "pin-project-lite", ] @@ -1099,9 +1498,9 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" @@ -1116,7 +1515,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c843caf6296fc1f93444735205af9ed4e109a539005abb2564ae1d6fad34c52" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "futures-cpupool", "h2 0.1.26", "http 0.1.21", @@ -1127,7 +1526,7 @@ dependencies = [ "log", "net2", "rustc_version 0.2.3", - "time 0.1.45", + "time 0.1.43", "tokio 0.1.22", "tokio-buf", "tokio-executor", @@ -1141,26 +1540,26 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.26" +version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.4.0", + 
"bytes 1.5.0", "futures-channel", "futures-core", "futures-util", - "h2 0.3.18", + "h2 0.3.21", "http 0.2.9", "http-body 0.4.5", "httparse", "httpdate", - "itoa 1.0.6", + "itoa 1.0.9", "pin-project-lite", - "socket2", - "tokio 1.28.0", + "socket2 0.4.9", + "tokio 1.32.0", "tower-service", "tracing", - "want 0.3.0", + "want 0.3.1", ] [[package]] @@ -1170,7 +1569,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a800d6aa50af4b5850b2b0f659625ce9504df908e9733b635720483be26174f" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "hyper 0.12.36", "native-tls", "tokio-io", @@ -1182,10 +1581,10 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.4.0", - "hyper 0.14.26", + "bytes 1.5.0", + "hyper 0.14.27", "native-tls", - "tokio 1.28.0", + "tokio 1.32.0", "tokio-native-tls", ] @@ -1213,9 +1612,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -1228,27 +1627,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg 1.1.0", - "hashbrown", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" -dependencies = [ - "hermit-abi 0.3.1", - "libc", - "windows-sys 0.48.0", + "hashbrown 0.12.3", ] [[package]] @@ -1262,20 +1641,28 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.1", - "io-lifetimes", + "hermit-abi", "rustix", - "windows-sys 0.48.0", + "windows-sys", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", ] [[package]] @@ -1286,9 +1673,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jobserver" @@ -1301,9 +1688,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.62" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68c16e1bfd491478ab155fd8b4896b86f9ede344949b641e61501e07c2b8b4d5" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -1332,25 +1719,34 @@ checksum = 
"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.144" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3979b5c37ece694f1f5e51e7ecc871fdb0f517ed04ee45f88d15d6d553cb9664" +dependencies = [ + "cc", + "libc", +] [[package]] name = "linux-raw-sys" -version = "0.3.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f" +checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db" [[package]] name = "local-channel" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +checksum = "e0a493488de5f18c8ffcba89eebb8532ffc562dc400490eb65b84893fae0b178" dependencies = [ "futures-core", "futures-sink", - "futures-util", "local-waker", ] @@ -1371,9 +1767,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg 1.1.0", "scopeguard", @@ -1381,11 +1777,27 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + 
+[[package]] +name = "lua-src" +version = "546.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c26d4af78361e025a3d03a2b964cd1592aff7495f4d4f7947218c084c6fdca8" dependencies = [ - "cfg-if 1.0.0", + "cc", +] + +[[package]] +name = "luajit-src" +version = "210.4.8+resty107baaf" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05167e8b2a2185758d83ed23541e5bd8bce37072e4204e0ef2c9b322bc87c4e" +dependencies = [ + "cc", + "which", ] [[package]] @@ -1394,6 +1806,15 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + [[package]] name = "markup5ever" version = "0.8.1" @@ -1445,9 +1866,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" [[package]] name = "memoffset" @@ -1458,6 +1879,24 @@ dependencies = [ "autocfg 1.1.0", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "mimalloc" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa01922b5ea280a911e323e4d2fd24b7fe5cc4042e0d2cda3c40775cdc4bdc9c" +dependencies = [ + "libmimalloc-sys", +] + [[package]] name = "mime" version = "0.3.17" @@ -1475,12 +1914,18 @@ 
dependencies = [ ] [[package]] -name = "miniz_oxide" -version = "0.6.2" +name = "mini-moka" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +checksum = "23e0b72e7c9042467008b10279fc732326bd605459ae03bda88825909dd19b56" dependencies = [ - "adler", + "crossbeam-channel", + "crossbeam-utils 0.8.16", + "dashmap", + "skeptic", + "smallvec 1.11.1", + "tagptr", + "triomphe", ] [[package]] @@ -1492,6 +1937,16 @@ dependencies = [ "adler", ] +[[package]] +name = "mintex" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd7c5ba1c3b5a23418d7bbf98c71c3d4946a0125002129231da8d6b723d559cb" +dependencies = [ + "once_cell", + "sys-info", +] + [[package]] name = "mio" version = "0.6.23" @@ -1513,14 +1968,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.45.0", + "windows-sys", ] [[package]] @@ -1535,6 +1990,22 @@ dependencies = [ "ws2_32-sys", ] +[[package]] +name = "mlua" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb37b0ba91f017aa7ca2b98ef99496827770cd635b4a932a6047c5b4bbe678e" +dependencies = [ + "bstr", + "cc", + "lua-src", + "luajit-src", + "num-traits", + "once_cell", + "pkg-config", + "rustc-hash", +] + [[package]] name = "native-tls" version = "0.2.11" @@ -1555,9 +2026,9 @@ dependencies = [ [[package]] name = "net2" -version = "0.2.38" +version = "0.2.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631" +checksum = 
"b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" dependencies = [ "cfg-if 0.1.10", "libc", @@ -1571,52 +2042,70 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" [[package]] -name = "nodrop" -version = "0.1.14" +name = "nias" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" +checksum = "ab250442c86f1850815b5d268639dff018c0627022bc1940eb2d642ca1ce12f0" + +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg 1.1.0", ] [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.30.3" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "memchr", ] 
[[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "openssl" -version = "0.10.52" +version = "0.10.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b8574602df80f7b85fdfc5392fa884a4e3b3f4f35402c070ab34c3d3f78d56" +checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" dependencies = [ - "bitflags", + "bitflags 2.4.0", "cfg-if 1.0.0", "foreign-types", "libc", @@ -1631,9 +2120,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] @@ -1644,9 +2133,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.87" +version = "0.9.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e17f59264b2809d77ae94f0e1ebabc434773f370d6ca667bd223ea10e06cc7e" +checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" dependencies = [ "cc", "libc", @@ -1671,8 +2160,8 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.9", - "parking_lot_core 0.9.7", + "lock_api 0.4.10", + "parking_lot_core 0.9.8", ] [[package]] @@ -1692,22 +2181,22 @@ dependencies = [ 
[[package]] name = "parking_lot_core" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.16", - "smallvec 1.10.0", - "windows-sys 0.45.0", + "redox_syscall 0.3.5", + "smallvec 1.11.1", + "windows-targets", ] [[package]] name = "paste" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "percent-encoding" @@ -1717,25 +2206,26 @@ checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" [[package]] name = "percent-encoding" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.6.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70" +checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" dependencies = [ + "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.6.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb" +checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8" dependencies = [ "pest", "pest_generator", @@ -1743,22 +2233,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.6.0" +version = 
"2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e" +checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] name = "pest_meta" -version = "2.6.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411" +checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d" dependencies = [ "once_cell", "pest", @@ -1774,24 +2264,23 @@ dependencies = [ "phf_shared 0.7.24", ] -[[package]] -name = "phf" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" -dependencies = [ - "phf_shared 0.8.0", -] - [[package]] name = "phf" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ - "phf_macros", "phf_shared 0.10.0", - "proc-macro-hack", +] + +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_macros", + "phf_shared 0.11.2", ] [[package]] @@ -1804,16 +2293,6 @@ dependencies = [ "phf_shared 0.7.24", ] -[[package]] -name = "phf_codegen" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" -dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", -] - [[package]] name = "phf_codegen" version = "0.10.0" @@ -1834,16 +2313,6 @@ dependencies = [ "rand 0.6.5", ] -[[package]] 
-name = "phf_generator" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" -dependencies = [ - "phf_shared 0.8.0", - "rand 0.7.3", -] - [[package]] name = "phf_generator" version = "0.10.0" @@ -1855,17 +2324,26 @@ dependencies = [ ] [[package]] -name = "phf_macros" -version = "0.10.0" +name = "phf_generator" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ - "phf_generator 0.10.0", - "phf_shared 0.10.0", - "proc-macro-hack", - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 1.0.109", + "phf_shared 0.11.2", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +dependencies = [ + "phf_generator 0.11.2", + "phf_shared 0.11.2", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] @@ -1877,29 +2355,49 @@ dependencies = [ "siphasher 0.2.3", ] -[[package]] -name = "phf_shared" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" -dependencies = [ - "siphasher 0.3.10", -] - [[package]] name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher 0.3.10", + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher 0.3.11", 
+] + +[[package]] +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -1913,6 +2411,34 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -1925,12 +2451,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" -[[package]] -name = "proc-macro-hack" -version = "0.5.20+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" - [[package]] name = "proc-macro2" version = "0.4.30" @@ -1942,9 +2462,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" dependencies = [ "unicode-ident", ] @@ -1956,7 +2476,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95b4ce31ff0a27d93c8de1849cf58162283752f065a90d508f1105fa6c9a213f" dependencies = [ "idna 0.2.3", - "url 2.3.1", + "url 2.4.1", +] + +[[package]] +name = "pulldown-cmark" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" +dependencies = [ + "bitflags 1.3.2", + "memchr", + "unicase", +] + +[[package]] +name = "quanta" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20afe714292d5e879d8b12740aa223c6a88f118af41870e8b6196e39a02238a8" +dependencies = [ + "crossbeam-utils 0.8.16", + "libc", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi 0.3.9", ] [[package]] @@ -1970,11 +2517,11 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.27" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ - "proc-macro2 1.0.56", + "proc-macro2 1.0.67", ] [[package]] @@ 
-1987,29 +2534,15 @@ dependencies = [ "libc", "rand_chacha 0.1.1", "rand_core 0.4.2", - "rand_hc 0.1.0", + "rand_hc", "rand_isaac", "rand_jitter", "rand_os", - "rand_pcg 0.1.2", + "rand_pcg", "rand_xorshift", "winapi 0.3.9", ] -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc 0.2.0", - "rand_pcg 0.2.1", -] - [[package]] name = "rand" version = "0.8.5" @@ -2031,16 +2564,6 @@ dependencies = [ "rand_core 0.3.1", ] -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - [[package]] name = "rand_chacha" version = "0.3.1" @@ -2066,22 +2589,13 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.9", + "getrandom", ] [[package]] @@ -2093,15 +2607,6 @@ dependencies = [ "rand_core 0.3.1", ] -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "rand_isaac" version = 
"0.1.1" @@ -2146,15 +2651,6 @@ dependencies = [ "rand_core 0.4.2", ] -[[package]] -name = "rand_pcg" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "rand_xorshift" version = "0.1.1" @@ -2164,6 +2660,35 @@ dependencies = [ "rand_core 0.3.1", ] +[[package]] +name = "raw-cpuid" +version = "10.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque 0.8.3", + "crossbeam-utils 0.8.16", +] + [[package]] name = "rdrand" version = "0.4.0" @@ -2175,16 +2700,26 @@ dependencies = [ [[package]] name = "redis" -version = "0.23.0" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea8c51b5dc1d8e5fd3350ec8167f464ec0995e79f2e90a075b63371500d557f" +checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" dependencies = [ + "arc-swap", + "async-trait", + "bytes 1.5.0", "combine", - "itoa 1.0.6", - "percent-encoding 2.2.0", + "futures 0.3.28", + "futures-util", + "itoa 1.0.9", + "percent-encoding 2.3.0", + "pin-project-lite", "ryu", "sha1_smol", - "url 2.3.1", + "socket2 0.4.9", + "tokio 1.32.0", + "tokio-retry", + "tokio-util", + "url 2.4.1", ] [[package]] @@ -2193,29 +2728,32 @@ version = "0.1.57" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", -] - [[package]] name = "redox_syscall" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "regex" -version = "1.8.1" +version = "1.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370" +checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" dependencies = [ "aho-corasick", "memchr", @@ -2224,9 +2762,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5996294f19bd3aae0453a862ad728f60e6600695733dd5df01da90c54363a3c" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" [[package]] name = "reqwest" @@ -2240,7 +2778,7 @@ dependencies = [ "cookie_store", "encoding_rs", "flate2", - "futures", + "futures 0.1.31", "http 0.1.21", "hyper 0.12.36", "hyper-tls 0.3.2", @@ -2251,7 +2789,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded 0.5.5", - "time 0.1.45", + "time 0.1.43", "tokio 0.1.22", "tokio-executor", "tokio-io", @@ -2264,19 +2802,19 @@ dependencies = [ [[package]] name 
= "reqwest" -version = "0.11.17" +version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13293b639a097af28fc8a90f22add145a9c954e49d77da06263d58cf44d5fb91" +checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64 0.21.0", - "bytes 1.4.0", + "base64 0.21.4", + "bytes 1.5.0", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.18", + "h2 0.3.21", "http 0.2.9", "http-body 0.4.5", - "hyper 0.14.26", + "hyper 0.14.27", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -2284,43 +2822,19 @@ dependencies = [ "mime", "native-tls", "once_cell", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite", "serde", "serde_json", "serde_urlencoded 0.7.1", - "tokio 1.28.0", + "tokio 1.32.0", "tokio-native-tls", "tower-service", - "url 2.3.1", + "url 2.4.1", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "winreg 0.10.1", -] - -[[package]] -name = "rlua" -version = "0.19.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b38117a836316ef62c02f6751e6d28e2eb53a1c35f0329427a9fb9c1c7b6a0" -dependencies = [ - "bitflags", - "bstr", - "libc", - "num-traits", - "rlua-lua54-sys", -] - -[[package]] -name = "rlua-lua54-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f42202b5aeb0bcc5df28436f8d963f8cbcbb898033a9e28c7ba4f299707934" -dependencies = [ - "cc", - "libc", - "pkg-config", + "winreg 0.50.0", ] [[package]] @@ -2329,6 +2843,12 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustc_version" version = "0.2.3" @@ -2344,28 +2864,39 @@ version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.17", + "semver 1.0.19", ] [[package]] name = "rustix" -version = "0.37.19" +version = "0.38.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531" dependencies = [ - "bitflags", + "bitflags 2.4.0", "errno", - "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys", +] + +[[package]] +name = "rusty-hook" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96cee9be61be7e1cbadd851e58ed7449c29c620f00b23df937cb9cbc04ac21a3" +dependencies = [ + "ci_info", + "getopts", + "nias", + "toml", ] [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -2378,43 +2909,43 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys 0.42.0", + "windows-sys", ] [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scraper" -version = "0.16.0" +version = "0.17.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e25654b5e9fd557a67dbaab5a5d36b8c448d0561beb4c041b6dbb902eddfa6" +checksum = "c95a930e03325234c18c7071fd2b60118307e025d6fff3e12745ffbf63a3d29c" dependencies = [ - "ahash 0.8.3", + "ahash", "cssparser", "ego-tree", "getopts", "html5ever 0.26.0", "once_cell", "selectors", - "smallvec 1.10.0", + "smallvec 1.11.1", "tendril", ] [[package]] name = "security-framework" -version = "2.8.2" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -2423,9 +2954,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.8.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -2443,20 +2974,21 @@ dependencies = [ [[package]] name = "selectors" -version = "0.24.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c37578180969d00692904465fb7f6b3d50b9a2b952b87c23d0e2e5cb5013416" +checksum = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06" dependencies = [ - "bitflags", + "bitflags 2.4.0", "cssparser", "derive_more", "fxhash", "log", - "phf 0.8.0", - "phf_codegen 0.8.0", + "new_debug_unreachable", + "phf 0.10.1", + "phf_codegen 0.10.0", "precomputed-hash", "servo_arc", - "smallvec 1.10.0", + "smallvec 1.11.1", ] [[package]] @@ -2470,9 +3002,12 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.17" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" +dependencies = [ + "serde", +] [[package]] name = "semver-parser" @@ -2482,31 +3017,31 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.162" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.162" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ - "itoa 1.0.6", + "itoa 1.0.9", "ryu", "serde", ] @@ -2517,7 +3052,7 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "642dd69105886af2efd227f75a520ec9b44a820d65bc133a9131f7d229fd165a" dependencies = [ - "dtoa", + "dtoa 0.4.8", "itoa 0.4.8", "serde", "url 1.7.2", @@ -2530,26 +3065,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.6", + "itoa 1.0.9", "ryu", "serde", ] [[package]] name = "servo_arc" -version = 
"0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52aa42f8fdf0fed91e5ce7f23d8138441002fa31dca008acf47e6fd4721f741" +checksum = "d036d71a959e00c77a63538b90a6c2390969f9772b096ea837205c6bd0491a44" dependencies = [ - "nodrop", "stable_deref_trait", ] [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -2564,9 +3098,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -2590,15 +3124,30 @@ checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "skeptic" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" +dependencies = [ + "bytecount", + "cargo_metadata", + "error-chain", + "glob", + "pulldown-cmark", + "tempfile", + "walkdir", +] [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg 1.1.0", ] @@ -2614,9 +3163,12 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -2628,6 +3180,16 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "socket2" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +dependencies = [ + "libc", + "windows-sys", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -2680,8 +3242,8 @@ checksum = "f0f45ed1b65bf9a4bf2f7b7dc59212d1926e9eaf00fa998988e420fd124467c6" dependencies = [ "phf_generator 0.7.24", "phf_shared 0.7.24", - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "string_cache_shared", ] @@ -2693,8 +3255,8 @@ checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" dependencies = [ "phf_generator 0.10.0", "phf_shared 0.10.0", - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", ] [[package]] @@ -2720,19 +3282,19 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.15" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +checksum = 
"7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "unicode-ident", ] @@ -2742,23 +3304,39 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", + "proc-macro2 1.0.67", + "quote 1.0.33", "syn 1.0.109", "unicode-xid 0.2.4", ] [[package]] -name = "tempfile" -version = "3.5.0" +name = "sys-info" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "0b3a0d0aba8bf96a0e1ddfdc352fc53b3df7f39318c71854910c3c4b024ae52c" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "tempfile" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", - "windows-sys 0.45.0", + "windows-sys", ] [[package]] @@ -2774,51 +3352,57 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" dependencies = [ "winapi-util", ] [[package]] name = "thiserror" -version = "1.0.40" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = 
"1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] -name = "time" -version = "0.1.45" +name = "thousands" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820" + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" dependencies = [ "libc", - "wasi 0.10.0+wasi-snapshot-preview1", "winapi 0.3.9", ] [[package]] name = "time" -version = "0.3.21" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" +checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" dependencies = [ - "itoa 1.0.6", + "deranged", + "itoa 1.0.9", "serde", "time-core", "time-macros", @@ -2826,19 +3410,29 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.9" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"372950940a5f07bf38dbe211d7283c9e6d7327df53794992d293e534c733d09b" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" dependencies = [ "time-core", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -2861,7 +3455,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "mio 0.6.23", "num_cpus", "tokio-current-thread", @@ -2875,21 +3469,21 @@ dependencies = [ [[package]] name = "tokio" -version = "1.28.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ - "autocfg 1.1.0", - "bytes 1.4.0", + "backtrace", + "bytes 1.5.0", "libc", - "mio 0.8.6", + "mio 0.8.8", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.4", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -2900,7 +3494,7 @@ checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" dependencies = [ "bytes 0.4.12", "either", - "futures", + "futures 0.1.31", ] [[package]] @@ -2909,7 +3503,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e" dependencies = [ - "futures", + "futures 0.1.31", "tokio-executor", ] @@ -2919,8 +3513,8 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" dependencies = [ - "crossbeam-utils", - "futures", + "crossbeam-utils 0.7.2", + "futures 0.1.31", ] [[package]] @@ -2930,7 +3524,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "log", ] @@ -2940,9 +3534,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", ] [[package]] @@ -2952,7 +3546,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", - "tokio 1.28.0", + "tokio 1.32.0", ] [[package]] @@ -2961,8 +3555,8 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351" dependencies = [ - "crossbeam-utils", - "futures", + "crossbeam-utils 0.7.2", + "futures 0.1.31", "lazy_static", "log", "mio 0.6.23", @@ -2974,6 +3568,17 @@ dependencies = [ "tokio-sync", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio 1.32.0", +] + [[package]] name = "tokio-sync" version = "0.1.8" @@ -2981,7 +3586,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee" dependencies = [ "fnv", - "futures", + "futures 0.1.31", ] [[package]] @@ -2991,7 +3596,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.31", "iovec", "mio 0.6.23", "tokio-io", @@ -3004,10 +3609,10 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df720b6581784c118f0eb4310796b12b1d242a7eb95f716a8367855325c25f89" dependencies = [ - "crossbeam-deque", + "crossbeam-deque 0.7.4", "crossbeam-queue", - "crossbeam-utils", - "futures", + "crossbeam-utils 0.7.2", + "futures 0.1.31", "lazy_static", "log", "num_cpus", @@ -3021,26 +3626,35 @@ version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296" dependencies = [ - "crossbeam-utils", - "futures", + "crossbeam-utils 0.7.2", + "futures 0.1.31", "slab", "tokio-executor", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-sink", "pin-project-lite", - "tokio 1.28.0", + "tokio 1.32.0", "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "tower-service" version = "0.3.2" @@ -3061,13 +3675,19 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" 
dependencies = [ "once_cell", ] +[[package]] +name = "triomphe" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f" + [[package]] name = "try-lock" version = "0.2.4" @@ -3085,21 +3705,21 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] @@ -3112,9 +3732,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -3127,9 +3747,9 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = 
"e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -3156,13 +3776,13 @@ dependencies = [ [[package]] name = "url" -version = "2.3.1" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" dependencies = [ "form_urlencoded", - "idna 0.3.0", - "percent-encoding 2.2.0", + "idna 0.4.0", + "percent-encoding 2.3.0", ] [[package]] @@ -3194,9 +3814,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -3208,32 +3828,25 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" dependencies = [ - "futures", + "futures 0.1.31", "log", "try-lock", ] [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] [[package]] name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" +version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "wasi" @@ -3243,9 +3856,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.85" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b6cb788c4e39112fbe1822277ef6fb3c55cd86b95cb3d3c4c1c9597e4ac74b4" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -3253,24 +3866,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.85" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e522ed4105a9d626d885b35d62501b30d9666283a5c8be12c14a8bdafe7822" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.35" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "083abe15c5d88556b77bdf7aef403625be9e327ad37c62c4e4129af740168163" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3280,38 +3893,38 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.85" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "358a79a0cb89d21db8120cbfb91392335913e4890665b1a7981d9e956903b434" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.27", + "quote 1.0.33", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = 
"0.2.85" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4783ce29f09b9d93134d41297aded3a712b7b979e9c6f28c32cb88c973a94869" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.56", - "quote 1.0.27", - "syn 2.0.15", + "proc-macro2 1.0.67", + "quote 1.0.33", + "syn 2.0.37", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.85" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a901d592cafaa4d711bc324edfaff879ac700b19c3dfd60058d2b445be2691eb" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "web-sys" -version = "0.3.62" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b5f940c7edfdc6d12126d98c9ef4d1b3d470011c47c76a6581df47ad9ba721" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -3319,23 +3932,50 @@ dependencies = [ [[package]] name = "websurfx" -version = "0.6.0" +version = "1.0.0" dependencies = [ + "actix-cors", "actix-files", + "actix-governor", "actix-web", + "async-once-cell", + "async-trait", + "criterion", + "dhat", "env_logger", + "error-stack", "fake-useragent", + "futures 0.3.28", "handlebars", "log", "md5", + "mimalloc", + "mini-moka", + "mlua", + "once_cell", "rand 0.8.5", "redis", - "reqwest 0.11.17", - "rlua", + "regex", + "reqwest 0.11.20", + "rusty-hook", "scraper", "serde", "serde_json", - "tokio 1.28.0", + "smallvec 1.11.1", + "tempfile", + "tokio 1.32.0", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", ] [[package]] @@ -3368,9 +4008,9 @@ checksum = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi 0.3.9", ] @@ -3381,152 +4021,71 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - 
"windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" -dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - 
-[[package]] -name = "windows_i686_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winreg" @@ -3539,11 +4098,12 @@ dependencies = [ [[package]] name = "winreg" -version = "0.10.1" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "winapi 0.3.9", + "cfg-if 1.0.0", + "windows-sys", ] [[package]] @@ -3558,18 +4118,18 @@ dependencies = [ [[package]] name = "zstd" -version = "0.12.3+zstd.1.5.2" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "6.0.5+zstd.1.5.4" +version = "6.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b" +checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581" dependencies = [ "libc", "zstd-sys", diff --git a/Cargo.toml b/Cargo.toml index ce99ca3..2984ffe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,23 +1,73 @@ [package] name = "websurfx" -version = "0.6.0" +version = "1.0.0" edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind." 
+repository = "https://github.com/neon-mmd/websurfx" +license = "AGPL-3.0" [dependencies] -reqwest = {version="*",features=["json"]} -tokio = {version="*",features=["full"]} -serde = {version="*",features=["derive"]} -handlebars = { version = "4.3.6", features = ["dir_source"] } -scraper = {version="*"} -actix-web = {version="4.3.1"} +reqwest = {version="0.11.20",features=["json"]} +tokio = {version="1.32.0",features=["rt-multi-thread","macros"]} +serde = {version="1.0.188",features=["derive"]} +handlebars = { version = "4.4.0", features = ["dir_source"] } +scraper = {version="0.17.1"} +actix-web = {version="4.4.0", features = ["cookies"]} actix-files = {version="0.6.2"} -serde_json = {version="*"} -fake-useragent = {version="*"} +actix-cors = {version="0.6.4"} +serde_json = {version="1.0.105"} +fake-useragent = {version="0.1.3"} env_logger = {version="0.10.0"} -log = {version="0.4.17"} -rlua = {version="*"} -redis = {version="*"} -md5 = {version="*"} -rand={version="*"} +log = {version="0.4.20"} +mlua = {version="0.8.10", features=["luajit", "vendored"]} +redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true} +md5 = {version="0.7.0"} +rand={version="0.8.5"} +once_cell = {version="1.18.0"} +error-stack = {version="0.4.0"} +async-trait = {version="0.1.73"} +regex = {version="1.9.4", features=["perf"]} +smallvec = {version="1.11.0", features=["union", "serde"]} +futures = {version="0.3.28"} +dhat = {version="0.3.2", optional = true} +mimalloc = { version = "0.1.38", default-features = false } +async-once-cell = {version="0.5.3"} +actix-governor = {version="0.4.1"} +mini-moka = { version="0.10", optional = true} + +[dev-dependencies] +rusty-hook = "^0.11.2" +criterion = "0.5.1" +tempfile = "3.8.0" + +[profile.dev] +opt-level = 0 +debug = true +split-debuginfo = '...' 
+debug-assertions = true +overflow-checks = true +lto = false +panic = 'unwind' +incremental = true +codegen-units = 256 +rpath = false + +[profile.release] +opt-level = 3 +debug = false # This should only be commented when testing with dhat profiler +# debug = 1 # This should only be uncommented when testing with dhat profiler +split-debuginfo = '...' +debug-assertions = false +overflow-checks = false +lto = true +panic = 'abort' +incremental = false +codegen-units = 1 +rpath = false +strip = "debuginfo" + +[features] +default = ["memory-cache"] +dhat-heap = ["dep:dhat"] +memory-cache = ["dep:mini-moka"] +redis-cache = ["dep:redis"] diff --git a/Dockerfile b/Dockerfile index 0fbba94..74c7adb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,9 @@ FROM rust:latest AS chef # We only pay the installation cost once, # it will be cached from the second build onwards -RUN cargo install cargo-chef +RUN cargo install cargo-chef --locked -WORKDIR app +WORKDIR /app FROM chef AS planner COPY . . @@ -12,15 +12,29 @@ RUN cargo chef prepare --recipe-path recipe.json FROM chef AS builder COPY --from=planner /app/recipe.json recipe.json # Build dependencies - this is the caching Docker layer! +# Uncomment the line below if you want to use the `hybrid` caching feature. +# RUN cargo chef cook --release --features redis-cache --recipe-path recipe.json +# Comment the line below if you don't want to use the `In-Memory` caching feature. RUN cargo chef cook --release --recipe-path recipe.json +# Uncomment the line below if you want to use the `no cache` feature. +# RUN cargo chef cook --release --no-default-features --recipe-path recipe.json +# Uncomment the line below if you want to use the `redis` caching feature. +# RUN cargo chef cook --release --no-default-features --features redis-cache --recipe-path recipe.json # Build application COPY . . +# Uncomment the line below if you want to use the `hybrid` caching feature. +# RUN cargo install --path . 
--features redis-cache +# Comment the line below if you don't want to use the `In-Memory` caching feature. RUN cargo install --path . +# Uncomment the line below if you want to use the `no cache` feature. +# RUN cargo install --path . --no-default-features +# Uncomment the line below if you want to use the `redis` caching feature. +# RUN cargo install --path . --no-default-features --features redis-cache # We do not need the Rust toolchain to run the binary! -FROM gcr.io/distroless/cc-debian11 -COPY --from=builder ./public/ ./public/ -COPY --from=builder ./websurfx/ ./websurfx/ +FROM gcr.io/distroless/cc-debian12 +COPY --from=builder /app/public/ /opt/websurfx/public/ +VOLUME ["/etc/xdg/websurfx/"] COPY --from=builder /usr/local/cargo/bin/* /usr/local/bin/ CMD ["websurfx"] diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index a7ad130..28cb6b7 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -16,7 +16,7 @@ ## Author's checklist - + ## Related issues diff --git a/README.md b/README.md index 95d4d21..6bdfc02 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,22 @@ -

+īģŋ

websurfx logo

Readme | Discord | + Instances | + User Showcase | GitHub | - Documentation + Documentation

+ + Awesome Self-Hosted + GitHub code size in bytesmeta search engine (pronounced as websurface or web-surface /wɛbˈsɜːrfəs/.) written in Rust. It - provides a quick and secure search experience while maintaining user + provides a quick and secure search experience while completely respecting user privacy.

@@ -49,30 +59,31 @@

- **Getting Started** - - [🔭 Preview](#preview-) - - [🚀 Features](#features-) - - [🛠ī¸ Installation and Testing](#installation-and-testing-) - - [🔧 Configuration](#configuration-) + - [🔭 Preview](#preview-) + - [🚀 Features](#features-) + - [🔗 Instances](instances-) + - [🛠ī¸ Installation and Testing](#installation-and-testing-%EF%B8%8F) + - [🔧 Configuration](#configuration-) - **Feature Overview** - - [🎨 Theming](#theming-) - - [🌍 Multi-Language Support](#multi-language-support-) + - [🎨 Theming](#theming-) + - [🌍 Multi-Language Support](#multi-language-support-) - **Community** - - [📊 System Requirements](#system-requirements-) - - [🗨ī¸ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-) - - [đŸ“Ŗ More Contributers Wanted](#more-contributers-wanted-) - - [💖 Supporting Websurfx](#supporting-websurfx-) - - [📘 Documentation](#documentation-) - - [đŸ›Ŗī¸ Roadmap](#roadmap-) - - [🙋 Contributing](#contributing-) - - [📜 License](#license-) - - [🤝 Credits](#credits-) + - [📊 System Requirements](#system-requirements-) + - [🗨ī¸ FAQ (Frequently Asked Questions)](#faq-frequently-asked-questions-) + - [đŸ“Ŗ More Contributors Wanted](#more-contributors-wanted-) + - [💖 Supporting Websurfx](#supporting-websurfx-) + - [📘 Documentation](#documentation-) + - [đŸ›Ŗī¸ Roadmap](#roadmap-) + - [🙋 Contributing](#contributing-) + - [📜 License](#license-) + - [🤝 Credits](#credits-)

-# Preview 🔭 +# Preview 🔭 -## Main Page +## Home Page @@ -86,9 +97,15 @@ **[âŦ†ī¸ Back to Top](#--)** -# Features 🚀 +# Instances 🔗 -- 🎨 High level of customizability with nine color schemes provided by default with a simple theme, also supporting the creation of your custom themes and colorschemes very quickly and easily +> For a full list of publicly available community driven `websurfx` instances to test or for daily use. see [**Instances**](./docs/instances.md) + +**[âŦ†ī¸ Back to Top](#--)** + +# Features 🚀 + +- 🎨 Make Websurfx uniquely yours with twelve color schemes provided by default. It also supports creation of custom themes and color schemes in a quick and easy way, so unleash your creativity! - 🔐 Fast, private, and secure - 🆓 100% free and open source - 💨 Ad-free and clean results @@ -96,7 +113,7 @@ **[âŦ†ī¸ Back to Top](#--)** -# Installation and Testing 🛠ī¸ +# Installation and Testing 🛠ī¸ > For full setup instructions, see: [**Installation**](./docs/installation.md) @@ -104,9 +121,10 @@ Before you can start building `websurfx`, you will need to have `Cargo` installe To get started with Websurfx, clone the repository, edit the config file, which is located in the `websurfx/` directory, and install the Redis server by following the instructions located [here](https://redis.io/docs/getting-started/) and then run the websurfx server and redis server using the following commands: -``` shell +```shell git clone https://github.com/neon-mmd/websurfx.git cd websurfx +git checkout stable cargo build -r redis-server --port 8082 & ./target/release/websurfx @@ -114,12 +132,14 @@ redis-server --port 8082 & Once you have started the server, open your preferred web browser and navigate to to start using Websurfx. -> **Warning** -> Please be aware that the project is still in the testing phase and is not ready for production use. +> **Note** +> +> 1. The project is no longer in the testing phase and is now ready for production use. +> 2. 
There are many features still missing like `support for image search`, `different categories`, `quick apps`, etc but they will be added soon as part of future releases. **[âŦ†ī¸ Back to Top](#--)** -# Configuration 🔧 +# Configuration 🔧 > For full configuration instructions, see: [**Configuration**](./docs/configuration.md) @@ -127,18 +147,18 @@ Websurfx is configured through the config.lua file, located at `websurfx/config. **[âŦ†ī¸ Back to Top](#--)** -# Theming 🎨 +# Theming 🎨 > For full theming and customization instructions, see: [**Theming**](./docs/theming.md) -Websurfx comes with several themes and color schemes by default, which you can apply and edit through the config file. Supports custom themes and color schemes using CSS, allowing you to develop your own unique-looking website. +Websurfx comes loaded with several themes and color schemes, which you can apply and edit through the config file. It also supports custom themes and color schemes using CSS, allowing you to make it truly yours. **[âŦ†ī¸ Back to Top](#--)** # Multi-Language Support 🌍 > **Note** -> Currently, we do not support other languages, but in the future, we will start accepting contributions regarding language support because we believe that language should not be a barrier to entry. +> Currently, we do not support other languages but we will start accepting contributions regarding language support in the future. We believe language should never be a barrier to entry. **[âŦ†ī¸ Back to Top](#--)** @@ -152,19 +172,19 @@ At present, we only support x86_64 architecture systems, but we would love to ha ## Why Websurfx? -The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. While there are numerous meta-search engines available, not all of them guarantee the security of their search engine, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is never a good thing. 
Also, there is the added problem of Spam, ads, and unorganic results which most engines don't have the full-proof answer to it till now but with Websurfx I finally put a full stop to this problem, also, Rust is used to write Websurfx, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, which is required by many graphic designers, content providers, and others. Websurfx attempts to improve the user experience by providing these and other features, such as proper NSFW blocking and Micro-apps or Quick results (like providing a calculator, currency exchanges, etc in the search results). +The primary purpose of the Websurfx project is to create a fast, secure, and privacy-focused meta-search engine. There are numerous meta-search engines available, but not all guarantee the security of their search engine, which is critical for maintaining privacy. Memory flaws, for example, can expose private or sensitive information, which is understandably bad. There is also the added problem of spam, ads, and inorganic results which most engines don't have a fool-proof answer to. Until now. With Websurfx I finally put a full stop to this problem. Websurfx is based on Rust, which ensures memory safety and removes such issues. Many meta-search engines also lack important features like advanced picture search, required by graphic designers, content providers, and others. Websurfx improves the user experience by providing these and other features, such as proper NSFW blocking and Micro-apps or Quick Results (providing a calculator, currency exchanges, etc in the search results). ## Why AGPLv3? -Websurfx is distributed under the **AGPLv3** license to keep the source code open and transparent. This helps to keep malware, telemetry, and other dangerous programs out of the project. 
**AGPLv3** is a strong copyleft license that ensures the software's source code, including any modifications or improvements made to the code, remains open and available to everyone. +Websurfx is distributed under the **AGPLv3** license to keep the source code open and transparent. This helps keep malware, telemetry, and other dangers out of the project. **AGPLv3** is a strong copyleft license that ensures the software's source code, including any modifications or improvements made to the code, remains open and available to everyone. ## Why Rust? -Rust was chosen as the programming language for Websurfx because of its memory safety features, which can help prevent vulnerabilities and make the codebase more secure. Rust is also faster than C++, which contributes to Websurfx's speed and responsiveness. Furthermore, the Rust ownership and borrowing system enables secure concurrency and thread safety in the program. +Websurfx is based on Rust due to its memory safety features, which prevents vulnerabilities and makes the codebase more secure. Rust is also faster than C++, contributing to Websurfx's speed and responsiveness. Finally, the Rust ownership and borrowing system enables secure concurrency and thread safety in the program. **[âŦ†ī¸ Back to Top](#--)** -# More Contributers Wanted đŸ“Ŗ +# More Contributors Wanted đŸ“Ŗ We are looking for more willing contributors to help grow this project. For more information on how you can contribute, check out the [project board](https://github.com/neon-mmd/websurfx/projects?query=is%3Aopen) and the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines and rules for making contributions. @@ -174,14 +194,15 @@ We are looking for more willing contributors to help grow this project. For more > For full details and other ways you can help out, see: [**Contributing**]() -If you use Websurfx and would like to contribute to its development, that would be fantastic! 
Contributions of any size or type are always welcome, and we will properly acknowledge your efforts. +If you use Websurfx and would like to contribute to its development, we're glad to have you on board! Contributions of any size or type are always welcome, and we will always acknowledge your efforts. Several areas that we need a bit of help with at the moment are: + - **Better and more color schemes**: Help fix color schemes and add other famous color schemes. - **Improve evasion code for bot detection** - Help improve code related to evading IP blocking and emulating human behaviors located in everyone's engine file. - **Logo** - Help create a logo for the project and website. - **Docker Support** - Help write a Docker Compose file for the project. -- Submit a PR to add a new feature, fix a bug, update the docs, add a theme, widget, or something else. +- Submit a PR to add a new feature, fix a bug, update the docs, add a theme, widget, or anything else. - Star Websurfx on GitHub. **[âŦ†ī¸ Back to Top](#--)** @@ -189,19 +210,19 @@ Several areas that we need a bit of help with at the moment are: # Documentation 📘 > **Note** -> We welcome any contributions to the [documentation](./docs/) as this will benefit everyone who uses this project. +> We welcome any contributions to the [documentation](../../tree/HEAD/docs/) as this will benefit everyone who uses this project. **[âŦ†ī¸ Back to Top](#--)** # Roadmap đŸ›Ŗī¸ -> Coming soon!! 🙂. +> Coming soon! 🙂. **[âŦ†ī¸ Back to Top](#--)** -# Contributing 🙋 +# Contributing 🙋 -Contributions are welcome from anyone. It doesn\'t matter who you are; you can still contribute to the project in your own way. +Contributions are welcome from anyone. It doesn't matter who you are; you can still contribute to the project in your own way. ## Not a developer but still want to contribute? 
@@ -213,16 +234,18 @@ If you are a developer, have a look at the [CONTRIBUTING.org](CONTRIBUTING.md) d **[âŦ†ī¸ Back to Top](#--)** -# License 📜 +# License 📜 Websurfx is licensed under the [AGPLv3](LICENSE) license. **[âŦ†ī¸ Back to Top](#--)** -# Credits 🤝 +# Credits 🤝 We would like to thank the following people for their contributions and support: +**Contributors** +


@@ -231,6 +254,14 @@ We would like to thank the following people for their contributions and support:

+**Stargazers** + +

+ + + +

+ **[âŦ†ī¸ Back to Top](#--)** --- diff --git a/docker-compose.yml b/docker-compose.yml index 37ef93d..6b50b24 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,3 +1,4 @@ +--- version: "3.9" services: app: @@ -5,11 +6,15 @@ services: build: . ports: - 8080:8080 - depends_on: - - redis - links: - - redis - redis: - image: redis:latest - ports: - - 6379:6379 + # Uncomment the following lines if you are using the `hybrid` or `redis` caching feature. + # depends_on: + # - redis + # links: + # - redis + volumes: + - ./websurfx/:/etc/xdg/websurfx/ + # Uncomment the following lines if you are using the `hybrid` or `redis` caching feature. + # redis: + # image: redis:latest + # ports: + # - 6379:6379 \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index 02f304b..b092778 100644 --- a/docs/README.md +++ b/docs/README.md @@ -7,7 +7,9 @@ # Users +- [Instances](./instances.md) - [Installation](./installation.md) +- [Features](./features.md) - [Configuration](./configuration.md) - [Theming](./theming.md) diff --git a/docs/configuration.md b/docs/configuration.md index bb10ba6..7bfdfc9 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -11,33 +11,59 @@ If you have built `websurfx` from source then the configuration file will be loc If you have installed `websurfx` using the package manager of your Linux distro then the default configuration file will be located at `/etc/xdg/websurfx/`. You can copy the default config to `~/.config/websurfx/` and make the changes there and rerun the websurfx server. -Some of the configuration options provided in the file are stated below. These are subdivided into three categories: +Some of the configuration options provided in the file are stated below. These are subdivided into the following categories: +- General - Server +- Search - Website - Cache +- Search Engines + +# General + +- **logging:** An option to enable or disable logs. +- **debug:** An option to enable or disable debug mode. 
+- **threads:** The amount of threads that the app will use to run (the value should be greater than 0). ## Server - **port:** Port number on which server should be launched. - **binding_ip_addr:** IP address on the which server should be launched. -- **production_use:** Whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users). If production_use is set to true. There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. This is newly added option and hence is only available in the **edge version**. +- **production_use:** Whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users). If production_use is set to true. There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. +- **request_timeout:** Timeout for the search requests sent to the upstream search engines to be fetched (value in seconds). +- **rate_limiter:** The configuration option to configure rate limiting on the search engine website. + +## Search + +- **safe_search:** This option is used to configure the search filtering based on different safe search levels. (value a number between 0 to 4) + +> This option provides 4 levels of search filtering: +> +> - Level 0 - With this level no search filtering occurs. +> - Level 1 - With this level some search filtering occurs. +> - Level 2 - With this level the upstream search engines are restricted to send sensitive contents like NSFW search results, etc. 
+> - Level 3 - With this level the regex based filter lists is used alongside level 2 to filter more search results that have slipped in or custom results that needs to be filtered using the filter lists. +> - Level 4 - This level is similar to level 3 except in this level the regex based filter lists are used to disallow users to search sensitive or disallowed content. This level could be useful if you are parent or someone who wants to completely disallow their kids or yourself from watching sensitive content. ## Website - **colorscheme:** The colorscheme name which should be used for the website theme (the name should be in accordance to the colorscheme file name present in `public/static/colorschemes` folder). -> By Default we provide 9 colorschemes to choose from these are: +> By Default we provide 12 colorschemes to choose from these are: > > 1. catppuccin-mocha -> 2. dracula -> 3. monokai -> 4. nord -> 5. oceanic-next -> 6. solarized-dark -> 7. solarized-light -> 8. tomorrow-night -> 9. gruvbox-dark +> 2. dark-chocolate +> 3. dracula +> 4. gruvbox-dark +> 5. monokai +> 6. nord +> 7. oceanic-next +> 8. one-dark +> 9. solarized-dark +> 10. solarized-light +> 11. tokyo-night +> 12. tomorrow-night - **theme:** The theme name which should be used for the website (again, the name should be in accordance to the theme file name present in `public/static/themes` folder). @@ -47,6 +73,13 @@ Some of the configuration options provided in the file are stated below. These a ## Cache -- **redis_connection_url:** Redis connection url address on which the client should connect on. +- **redis_url:** Redis connection url address on which the client should connect on. -[âŦ…ī¸ Go back to Home](./README.md) +> **Note** +> This option can be commented out if you have compiled the app without the `redis-cache` feature. For more information, See [**building**](./building.md). 
+ +## Search Engines + +- **upstream_search_engines:** Select from the different upstream search engines from which the results should be fetched. + +[âŦ…ī¸ Go back to Home](./README.md) diff --git a/docs/features.md b/docs/features.md new file mode 100644 index 0000000..b5e8db4 --- /dev/null +++ b/docs/features.md @@ -0,0 +1,42 @@ +# Features + +The project provides 4 caching options as conditionally compiled features. This helps reduce the size of the compiled app by only including the code that is necessary for a particular caching option. + +The different caching features provided are as follows: +- No cache +- Redis cache +- In memory cache +- Hybrid cache + +## Explanation + +### No Cache + +This feature can drastically reduce binary size but with the cost that subsequent search requests and previous & next page search results are not cached which can make navigating between pages slower. As well as page refreshes of the same page also becomes slower as each refresh has to fetch the results from the upstream search engines. + +### Redis Cache + +This feature allows the search engine to cache the results on the redis server. This feature can be useful for having a dedicated cache server for multiple devices hosted with the `Websurfx` server which can use the one dedicated cache server for hosting their cache on it. But a disadvantage of this solution is that if the `Redis` server is located far away (for example provided by a vps as service) and if it is unavailable or down for some reason then the `Websurfx` server would not be able to function properly or will crash on startup. + +### In Memory Cache + +This feature is the default feature provided by the project. This feature allows the search engine to cache the results in the memory which can help increase the speed of the fetched cache results and it also has an advantage that it is extremely reliable as all the results are stored in memory within the search engine. 
Though the disadvantages of this solution are that caching of results is slightly slower than the `redis-cache` solution, it requires a good amount of memory on the system and as such is not ideal for very low memory devices and is highly unscalable. + +### Hybrid Cache + +This feature provides the advantages of both `In Memory` caching and `Redis` caching and it is an ideal solution if you need a very resilient and reliable solution for the `Websurfx` which can provide both speed and reliability. Like for example if the `Redis` server becomes unavailable then the search engine switches to `In Memory` caching until the server becomes available again. This solution can be useful for hosting `Websurfx` instance which will be used by hundreds or thousands of users over the world. + +## Tabular Summary + + +| **Attributes** | **Hybrid** | **In-Memory** | **No Cache** | **Redis** | +|-----------------------------------------|------------|------------------------------------------------------|-----------------|------------------------| +| **Speed** | Fast | Caching is slow, but retrieval of cache data is fast | Slow | Fastest | +| **Reliability** | ✅ | ✅ | ✅ | ❌ | +| **Scalability** | ✅ | ❌ | - | ✅ | +| **Resiliency** | ✅ | ✅ | ✅ | ❌ | +| **Production/Large Scale/Instance use** | ✅ | Not Recommended | Not Recommended | Not Recommended | +| **Low Memory Support** | ❌ | ❌ | ✅ | ❌ | +| **Binary Size** | Big | Bigger than `No Cache` | small | Bigger than `No Cache` | + +[âŦ…ī¸ Go back to Home](./README.md) diff --git a/docs/installation.md b/docs/installation.md index 4719ddc..1c4b112 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,16 +2,17 @@ ## Arch Linux -You can install `Websurfx` through the [Aur](https://aur.archlinux.org/packages/websurfx-git), Currently we only support `Rolling/Edge` version. 
You can install the rolling/edge version by running the following command (using [paru](https://github.com/Morganamilo/paru)): +### Rolling/Edge/Unstable -```bash +You can install `Websurfx` through the [Aur](https://aur.archlinux.org/packages/websurfx-git), by running the following command (using [paru](https://github.com/Morganamilo/paru)): + +```shell paru -S websurfx-edge-git ``` After installing it you can run the websurfx server by running the following commands: -``` bash -redis-server --port 8082 & +```shell websurfx ``` @@ -19,7 +20,79 @@ Once you have started the server, open your preferred web browser and navigate t If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md). -## Other Distros +### Stable + +For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge` version) with the only difference being that the package to be installed for stable version is called `websurfx-git` instead of `websurfx-edge-git`. + +## NixOS + +A `flake.nix` has been provided to allow installing `websurfx` easily. It utilizes [naersk](https://github.com/nix-community/naersk) to automatically generate a derivation based on `Cargo.toml` and `Cargo.lock`. + +The Websurfx project provides 2 versions/flavours for the flake `stable` and `rolling/unstable/edge`. The steps for each are covered below in different sections. 
+ +### Rolling/Edge/Unstable + +To get started, First clone the repository, edit the config file which is located in the `websurfx` directory and then build and run the websurfx server by running the following commands: + +```shell +git clone https://github.com/neon-mmd/websurfx.git +cd websurfx +cp -rf ./websurfx/ ~/.config/ +$ mkdir /opt/websurfx/ +$ cp -rf ./public/ /opt/websurfx/ +nix build .#websurfx +nix run .#websurfx +``` + +> **Note** +> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods. + +Once you have run the above set of commands, then open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx. + +If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md). + +> Optionally, you may include it in your own flake by adding this repo to its inputs and adding it to `environment.systemPackages` as follows: +> +> ```nix +> { +> description = "My awesome configuration"; +> +> inputs = { +> websurfx.url = "github:neon-mmd/websurfx"; +> }; +> +> outputs = { nixpkgs, ... 
}@inputs: { +> nixosConfigurations = { +> hostname = nixpkgs.lib.nixosSystem { +> system = "x86_64-linux"; +> modules = [{ +> environment.systemPackages = [inputs.websurfx.packages.x86_64-linux.websurfx]; +> }]; +> }; +> }; +> }; +> } +> ``` + +### Stable + +For the stable version, follow the same steps as above (as mentioned for the `unstable/rolling/edge version`) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the building step as follows: + +```shell +git clone https://github.com/neon-mmd/websurfx.git +cd websurfx +git checkout stable +cp -rf ./websurfx/ ~/.config/ +$ mkdir /opt/websurfx/ +$ cp -rf ./public/ /opt/websurfx/ +nix build .#websurfx +nix run .#websurfx +``` + +> **Note** +> In the above command the dollar sign(**$**) refers to running the command in privilaged mode by using utilities `sudo`, `doas`, `pkgexec` or any other privilage access methods. + +## Other Distros The package is currently not available on other Linux distros. With contribution and support it can be made available on other distros as well 🙂. @@ -34,6 +107,7 @@ To get started with Websurfx, clone the repository, edit the config file which i ```shell git clone https://github.com/neon-mmd/websurfx.git cd websurfx +git checkout stable cargo build -r redis-server --port 8082 & ./target/release/websurfx @@ -50,13 +124,63 @@ If you want to use the rolling/edge branch, run the following commands instead: ```shell git clone https://github.com/neon-mmd/websurfx.git cd websurfx -git checkout rolling +``` + +Once you have changed the directory to the `websurfx` directory then follow the build options listed below: + +### Hybrid Cache + +> For more information on the features and their pros and cons. see: [**Features**](./features.md) + +To build the search engine with the `Hybrid caching` feature. 
Run the following build command: + +```shell +cargo build -r --features redis-cache +``` + +### Memory Cache (Default Feature) + +> For more information on the features and their pros and cons. see: [**Features**](./features.md) + +To build the search engine with the `In-Memory caching` feature. Run the following build command: + +```shell cargo build -r -redis-server --port 8082 & +``` + +### No Cache + +> For more information on the features and their pros and cons. see: [**Features**](./features.md) + +To build the search engine with the `No caching` feature. Run the following build command: + +```shell +cargo build -r --no-default-features +``` + +### Redis Cache + +> For more information on the features and their pros and cons. see: [**Features**](./features.md) + +To build the search engine with the `Redis caching` feature. Run the following build command: + +```shell +cargo build -r --no-default-features --features redis-cache +``` + +> Optionally, If you have built the app with the `Redis cache` or `Hybrid cache` feature (as mentioned above) then before launching the search engine run the following command: +> +> ```shell +> redis-server --port 8082 & +> ``` + +Once you have finished building the `search engine`, then run the following command to start the search engine: + +```shell ./target/release/websurfx ``` -Once you have started the server, open your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx. +Once you have started the server, then launch your preferred web browser and navigate to http://127.0.0.1:8080/ to start using Websurfx. If you want to change the port or the ip or any other configuration setting checkout the [configuration docs](./configuration.md). @@ -64,7 +188,121 @@ If you want to change the port or the ip or any other configuration setting chec Before you start, you will need [Docker](https://docs.docker.com/get-docker/) installed on your system first. 
-## Stable +## Prebuild + +The Websurfx project provides several prebuild images based on the different features provided by the search engine. To get started using the prebuild image, you will first need to create a `docker-compose.yml` file with the following content: + +```yaml +--- +version: '3.9' +services: + app: + # Comment the line below if you don't want to use the `hybrid/latest` image. + image: neonmmd/websurfx:latest + # Uncomment the line below if you want to use the `no cache` image. + # image: neonmmd/websurfx:nocache + # Uncomment the line below if you want to use the `memory` image. + # image: neonmmd/websurfx:memory + # Uncomment the line below if you want to use the `redis` image. + # image: neonmmd/websurfx:redis + ports: + - 8080:8080 + # Uncomment the following lines if you are using the `hybrid/latest` or `redis` image. + # depends_on: + # - redis + # links: + # - redis + volumes: + - ./websurfx/:/etc/xdg/websurfx/ + # Uncomment the following lines if you are using the `hybrid/latest` or `redis` image. + # redis: + # image: redis:latest + # ports: + # - 6379:6379 +``` + +Then make sure to edit the `docker-compose.yml` file as required. After that create a directory `websurfx` in the directory you have placed the `docker-compose.yml` file, and then in the new directory create two new empty files named `allowlist.txt` and `blocklist.txt`. Finally, create a new config file `config.lua` with the default configuration, which looks something like this: + +```lua +-- ### General ### +logging = true -- an option to enable or disable logs. +debug = false -- an option to enable or disable debug mode. +threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0). + +-- ### Server ### +port = "8080" -- port on which server should be launched +binding_ip = "0.0.0.0" --ip address on the which server should be launched. 
+production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one)) +-- if production_use is set to true +-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. +request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds). +rate_limiter = { + number_of_requests = 20, -- The number of request that are allowed within a provided time limit. + time_limit = 3, -- The time limit in which the quantity of requests that should be accepted. +} + +-- ### Search ### +-- Filter results based on different levels. The levels provided are: +-- {{ +-- 0 - None +-- 1 - Low +-- 2 - Moderate +-- 3 - High +-- 4 - Aggressive +-- }} +safe_search = 2 + +-- ### Website ### +-- The different colorschemes provided are: +-- {{ +-- catppuccin-mocha +-- dark-chocolate +-- dracula +-- gruvbox-dark +-- monokai +-- nord +-- oceanic-next +-- one-dark +-- solarized-dark +-- solarized-light +-- tokyo-night +-- tomorrow-night +-- }} +colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme +theme = "simple" -- the theme name which should be used for the website + +-- ### Caching ### +redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on. + +-- ### Search Engines ### +upstream_search_engines = { + DuckDuckGo = true, + Searx = false, +} -- select the upstream search engines from which the results should be fetched. 
+``` + +Then run the following command to deploy the search engine: + +```shell +$ docker compose up -d +``` + +> **Note** +> In the above command the dollar sign(**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec` or any other privilege access methods. + +Then launch the browser of your choice and navigate to http://:. + +> **Note** +> The official prebuilt images only support `stable` versions of the app and will not support `rolling/edge/unstable` versions. But with support and contribution it could be made available for these versions as well 🙂. + +## Manual Deployment + +This section covers how to deploy the app with docker manually by building the image and deploying it. + +> **Note** +> This section is provided for those who want to further customize the docker image or for those who are extra cautious about security. + +### Unstable/Edge/Rolling First clone the the repository by running the following command: @@ -76,49 +314,82 @@ After that edit the config.lua file located under `websurfx` directory. In the config file you will specifically need to change to values which is `binding_ip_addr` and `redis_connection_url` which should make the config look something like this: ```lua --- Server -port = "8080" -- port on which server should be launched -binding_ip_addr = "0.0.0.0" --ip address on the which server should be launched. +-- ### General ### +logging = true -- an option to enable or disable logs. +debug = false -- an option to enable or disable debug mode. +threads = 8 -- the amount of threads that the app will use to run (the value should be greater than 0). --- Website +-- ### Server ### +port = "8080" -- port on which server should be launched +binding_ip = "0.0.0.0" --ip address on the which server should be launched. 
+production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one)) +-- if production_use is set to true +-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. +request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds). +rate_limiter = { + number_of_requests = 20, -- The number of request that are allowed within a provided time limit. + time_limit = 3, -- The time limit in which the quantity of requests that should be accepted. +} + +-- ### Search ### +-- Filter results based on different levels. The levels provided are: +-- {{ +-- 0 - None +-- 1 - Low +-- 2 - Moderate +-- 3 - High +-- 4 - Aggressive +-- }} +safe_search = 2 + +-- ### Website ### -- The different colorschemes provided are: -- {{ -- catppuccin-mocha +-- dark-chocolate -- dracula +-- gruvbox-dark -- monokai -- nord -- oceanic-next +-- one-dark -- solarized-dark -- solarized-light +-- tokyo-night -- tomorrow-night -- }} colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme theme = "simple" -- the theme name which should be used for the website --- Caching -redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on. +-- ### Caching ### +redis_url = "redis://redis:6379" -- redis connection url address on which the client should connect on. 
-production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users) --- if production_use is set to true - -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. +-- ### Search Engines ### +upstream_search_engines = { + DuckDuckGo = true, + Searx = false, +} -- select the upstream search engines from which the results should be fetched. ``` -After this run the following command to deploy the app: +After this make sure to edit the `docker-compose.yml` and `Dockerfile` files as required and run the following command to deploy the app: ```bash -docker compose up -d --build +$ docker compose up -d --build ``` +> **Note** +> In the above command the dollar sign(**$**) refers to running the command in privileged mode by using utilities `sudo`, `doas`, `pkexec` or any other privilege access methods. + This will take around 5-10 mins for first deployment, afterwards the docker build stages will be cached so it will be faster to be build from next time onwards. After the above step finishes launch your preferred browser and then navigate to `http://:`. 
-## Unstable/Edge/Rolling +### Stable -For the unstable/rolling/edge version, follow the same steps as above (as mentioned for the stable version) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the cloning step as follows: +For the stable version, follow the same steps as above (as mentioned for the unstable/rolling/edge version) with an addition of one command which has to be performed after cloning and changing directory into the repository which makes the cloning step as follows: ```bash git clone https://github.com/neon-mmd/websurfx.git cd websurfx -git checkout rolling +git checkout stable ``` -[âŦ…ī¸ Go back to Home](./README.md) +[âŦ…ī¸ Go back to Home](./README.md) diff --git a/docs/instances.md b/docs/instances.md new file mode 100644 index 0000000..67d982b --- /dev/null +++ b/docs/instances.md @@ -0,0 +1,12 @@ +# Instances + +> To contribute your server instance, check out the contributing guide [here](https://github.com/neon-mmd/websurfx/blob/HEAD/CONTRIBUTING.md). + +This page provides a list of `Websurfx` instances provided by us and our community. + +|URL|Network|Version|Location|Behind Cloudflare?|Maintained By|TLS|IPv6|Comment| +|-|-|-|-|-|-|-|-|-| +|https://alamin655-websurfx.hf.space/|www|v0.21.4|đŸ‡ē🇸 US||[websurfx project](https://github.com/neon-mmd/websurfx)|✅||| + + +[âŦ…ī¸ Go back to Home](./README.md) diff --git a/docs/theming.md b/docs/theming.md index 49f3f22..dd46a65 100644 --- a/docs/theming.md +++ b/docs/theming.md @@ -8,17 +8,17 @@ By default `websurfx` comes with 9 colorschemes to choose from which can be easi Creating coloschemes is as easy as it gets it requires the user to have a theme file name with the colorscheme in which every space should be replaced with a `-` (dash) and it should end with a `.css` file extension. 
After creating the file you need to add the following code with the `colors` you want: -``` css -:root{ - --bg: ; - --fg: ; - --1: ; - --2: ; - --3: ; - --4: ; - --5: ; - --6: ; - --7: ; +```css +:root { + --background-color: ; + --foreground-color: ; + --color-one: ; + --color-two: ; + --color-three: ; + --color-four: ; + --color-five: ; + --color-six: ; + --color-seven: ; } ``` @@ -27,17 +27,17 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme **Example of `catppuccin-mocha` colorscheme:** -``` css +```css :root { - --bg: #1e1e2e; - --fg: #cdd6f4; - --1: #45475a; - --2: #f38ba8; - --3: #a6e3a1; - --4: #f9e2af; - --5: #89b4fa; - --6: #f5c2e7; - --7: #ffffff; + --background-color: #1e1e2e; + --foreground-color: #cdd6f4; + --color-one: #45475a; + --color-two: #f38ba8; + --color-three: #a6e3a1; + --color-four: #f9e2af; + --color-five: #89b4fa; + --color-six: #f5c2e7; + --color-seven: #ffffff; } ``` @@ -47,14 +47,15 @@ Creating coloschemes is as easy as it gets it requires the user to have a theme By default `websurfx` comes with 1 theme to choose from which can be easily chosen using the config file. To how to change themes please view the [Configuration](https://github.com/neon-mmd/websurfx/wiki/configuration) section of the wiki. -## Custom +## Custom -To write custom color scheme, it requires the user to have some knowledge of `css stylesheets`. +To write custom color scheme, it requires the user to have some knowledge of `css stylesheets`. 
**Here is an example of `simple theme` (which we provide by default with the app) which will give the user a better idea on how to create a custom theme using it as a template:** ### General -``` css + +```css * { padding: 0; margin: 0; @@ -71,11 +72,13 @@ body { justify-content: space-between; align-items: center; height: 100vh; - background: var(--1); + background: var(--color-one); } ``` + ### Styles for the index page -``` css + +```css .search-container { display: flex; flex-direction: column; @@ -88,8 +91,10 @@ body { display: flex; } ``` + ### Styles for the search box and search button -``` css + +```css .search_bar { display: flex; } @@ -101,7 +106,7 @@ body { outline: none; border: none; box-shadow: rgba(0, 0, 0, 1); - background: var(--fg); + background: var(--foreground-color); } .search_bar button { @@ -114,8 +119,8 @@ body { outline: none; border: none; gap: 0; - background: var(--bg); - color: var(--3); + background: var(--background-color); + color: var(--color-three); font-weight: 600; letter-spacing: 0.1rem; } @@ -124,11 +129,133 @@ body { .search_bar button:hover { filter: brightness(1.2); } + +.search_area .search_options { + display: flex; + justify-content: space-between; + align-items: center; +} + +.search_area .search_options select { + margin: 0.7rem 0; + width: 20rem; + background-color: var(--color-one); + color: var(--foreground-color); + padding: 1rem 2rem; + border-radius: 0.5rem; + outline: none; + border: none; + text-transform: capitalize; +} + +.search_area .search_options option:hover { + background-color: var(--color-one); +} + +.result_not_found { + display: flex; + flex-direction: column; + font-size: 1.5rem; + color: var(--foreground-color); +} + +.result_not_found p { + margin: 1rem 0; +} + +.result_not_found ul { + margin: 1rem 0; +} + +.result_not_found img { + width: 40rem; +} ``` + +```css +/* styles for the error box */ +.error_box .error_box_toggle_button { + background: var(--foreground-color); +} + +.error_box 
.dropdown_error_box { + position: absolute; + display: none; + flex-direction: column; + background: var(--background-color); + border-radius: 0; + margin-left: 2rem; + min-height: 20rem; + min-width: 22rem; +} +.error_box .dropdown_error_box.show { + display: flex; +} +.error_box .dropdown_error_box .error_item, +.error_box .dropdown_error_box .no_errors { + display: flex; + align-items: center; + color: var(--foreground-color); + letter-spacing: 0.1rem; + padding: 1rem; + font-size: 1.2rem; +} +.error_box .dropdown_error_box .error_item { + justify-content: space-between; +} +.error_box .dropdown_error_box .no_errors { + min-height: 18rem; + justify-content: center; +} + +.error_box .dropdown_error_box .error_item:hover { + box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1); +} + +.error_box .error_item .severity_color { + width: 1.2rem; + height: 1.2rem; +} +.results .result_disallowed, +.results .result_filtered, +.results .result_engine_not_selected { + display: flex; + justify-content: center; + align-items: center; + gap: 10rem; + font-size: 2rem; + color: var(--foreground-color); + margin: 0rem 7rem; +} + +.results .result_disallowed .user_query, +.results .result_filtered .user_query, +.results .result_engine_not_selected .user_query { + color: var(--background-color); + font-weight: 300; +} + +.results .result_disallowed img, +.results .result_filtered img, +.results .result_engine_not_selected img { + width: 30rem; +} + +.results .result_disallowed div, +.results .result_filtered div, +.results .result_engine_not_selected div { + display: flex; + flex-direction: column; + gap: 1rem; + line-break: strict; +} +``` + ### Styles for the footer and header -``` css + +```css header { - background: var(--bg); + background: var(--background-color); width: 100%; display: flex; justify-content: right; @@ -151,7 +278,7 @@ footer ul li a, header ul li a:visited, footer ul li a:visited { text-decoration: none; - color: var(--2); + color: var(--color-two); 
text-transform: capitalize; letter-spacing: 0.1rem; } @@ -162,7 +289,27 @@ header ul li a { header ul li a:hover, footer ul li a:hover { - color: var(--5); + color: var(--color-five); +} + +footer div span { + font-size: 1.5rem; + color: var(--color-four); +} + +footer div { + display: flex; + gap: 1rem; +} + +footer { + background: var(--background-color); + width: 100%; + padding: 1rem; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; } footer div span { @@ -185,8 +332,10 @@ footer { align-items: center; } ``` + ### Styles for the search page -``` css + +```css .results { width: 90%; display: flex; @@ -213,28 +362,28 @@ footer { .results_aggregated .result h1 a { font-size: 1.5rem; - color: var(--2); + color: var(--color-two); text-decoration: none; letter-spacing: 0.1rem; } .results_aggregated .result h1 a:hover { - color: var(--5); + color: var(--color-five); } .results_aggregated .result h1 a:visited { - color: var(--bg); + color: var(--background-color); } .results_aggregated .result small { - color: var(--3); + color: var(--color-three); font-size: 1.1rem; word-wrap: break-word; line-break: anywhere; } .results_aggregated .result p { - color: var(--fg); + color: var(--foreground-color); font-size: 1.2rem; margin-top: 0.3rem; word-wrap: break-word; @@ -245,13 +394,13 @@ footer { text-align: right; font-size: 1.2rem; padding: 1rem; - color: var(--5); + color: var(--color-five); } ``` -### Styles for the 404 page +### Styles for the 404 page -``` css +```css .error_container { display: flex; justify-content: center; @@ -290,16 +439,18 @@ footer { .error_content p a, .error_content p a:visited { - color: var(--2); + color: var(--color-two); text-decoration: none; } .error_content p a:hover { - color: var(--5); + color: var(--color-five); } ``` + ### Styles for the previous and next button on the search page -``` css + +```css .page_navigation { padding: 0 0 2rem 0; display: flex; @@ -308,8 +459,8 @@ footer { } 
.page_navigation button { - background: var(--bg); - color: var(--fg); + background: var(--background-color); + color: var(--foreground-color); padding: 1rem; border-radius: 0.5rem; outline: none; @@ -326,40 +477,258 @@ footer { This part is only available right now in the **rolling/edge/unstable** version ```css -.about-container article{ - font-size: 1.5rem; - color:var(--fg); - padding-bottom: 10px; - } - -.about-container article h1{ - color: var(--2); - font-size: 2.8rem; - } - -.about-container article div{ - padding-bottom: 15px; - } - -.about-container a{ - color:var(--3); +.about-container article { + font-size: 1.5rem; + color: var(--foreground-color); + padding-bottom: 10px; } -.about-container article h2{ - color: var(--3); +.about-container article h1 { + color: var(--color-two); + font-size: 2.8rem; +} + +.about-container article div { + padding-bottom: 15px; +} + +.about-container a { + color: var(--color-three); +} + +.about-container article h2 { + color: var(--color-three); font-size: 1.8rem; padding-bottom: 10px; } -.about-container p{ - color:var(--fg); - font-size: 1.6rem; +.about-container p { + color: var(--foreground-color); + font-size: 1.6rem; padding-bottom: 10px; } -.about-container h3{ +.about-container h3 { font-size: 1.5rem; } + +.about-container { + width: 80%; +} ``` -[âŦ…ī¸ Go back to Home](./README.md) +### Styles for the Settings Page + +This part is only available right now in the **rolling/edge/unstable** version + +```css +.settings_container { + display: flex; + justify-content: space-around; + width: 80dvw; +} + +.settings h1 { + color: var(--color-two); + font-size: 2.5rem; +} + +.settings hr { + border-color: var(--color-three); + margin: 0.3rem 0 1rem 0; +} + +.settings_container .sidebar { + width: 30%; + cursor: pointer; + font-size: 2rem; + display: flex; + flex-direction: column; + margin-right: 0.5rem; + margin-left: -0.7rem; + padding: 0.7rem; + border-radius: 5px; + font-weight: bold; + margin-bottom: 0.5rem; + 
color: var(--foreground-color); + text-transform: capitalize; + gap: 1.5rem; +} + +.settings_container .sidebar .btn { + padding: 0.5rem; + border-radius: 0.5rem; +} + +.settings_container .sidebar .btn.active { + background-color: var(--color-two); +} + +.settings_container .main_container { + width: 70%; + border-left: 1.5px solid var(--color-three); + padding-left: 3rem; +} + +.settings_container .tab { + display: none; +} + +.settings_container .tab.active { + display: flex; + flex-direction: column; + justify-content: space-around; +} + +.settings_container button { + margin-top: 1rem; + padding: 1rem 2rem; + font-size: 1.5rem; + background: var(--color-three); + color: var(--background-color); + border-radius: 0.5rem; + border: 2px solid transparent; + font-weight: bold; + transition: all 0.1s ease-out; + cursor: pointer; + box-shadow: 5px 5px; + outline: none; +} + +.settings_container button:active { + box-shadow: none; + translate: 5px 5px; +} + +.settings_container .main_container .message { + font-size: 1.5rem; + color: var(--foreground-color); +} + +.settings_container .tab h3 { + font-size: 2rem; + font-weight: bold; + color: var(--color-four); + margin-top: 1.5rem; + text-transform: capitalize; +} + +.settings_container .tab .description { + font-size: 1.5rem; + margin-bottom: 0.5rem; + color: var(--foreground-color); +} + +.settings_container .user_interface select, +.settings_container .general select { + margin: 0.7rem 0; + width: 20rem; + background-color: var(--background-color); + color: var(--foreground-color); + padding: 1rem 2rem; + border-radius: 0.5rem; + outline: none; + border: none; + text-transform: capitalize; +} + +.settings_container .user_interface option:hover, +.settings_container .general option:hover { + background-color: var(--color-one); +} + +.settings_container .engines .engine_selection { + display: flex; + flex-direction: column; + justify-content: center; + gap: 1rem; + padding: 1rem 0; +} + +.settings_container .engines 
.toggle_btn { + color: var(--foreground-color); + font-size: 1.5rem; + display: flex; + gap: 0.5rem; + align-items: center; +} + +.settings_container .engines hr { + margin: 0; +} + +.settings_container .cookies input { + margin: 1rem 0rem; +} +``` + +### Styles for the Toggle Button + +This part is only available right now in the **rolling/edge/unstable** version + +```css +/* The switch - the box around the slider */ +.switch { + position: relative; + display: inline-block; + width: 6rem; + height: 3.4rem; +} + +/* Hide default HTML checkbox */ +.switch input { + opacity: 0; + width: 0; + height: 0; +} + +/* The slider */ +.slider { + position: absolute; + cursor: pointer; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: var(--background-color); + -webkit-transition: 0.4s; + transition: 0.4s; +} + +.slider:before { + position: absolute; + content: ''; + height: 2.6rem; + width: 2.6rem; + left: 0.4rem; + bottom: 0.4rem; + background-color: var(--foreground-color); + -webkit-transition: 0.4s; + transition: 0.4s; +} + +input:checked + .slider { + background-color: var(--color-three); +} + +input:focus + .slider { + box-shadow: 0 0 1px var(--color-three); +} + +input:checked + .slider:before { + -webkit-transform: translateX(2.6rem); + -ms-transform: translateX(2.6rem); + transform: translateX(2.6rem); +} + +/* Rounded sliders */ +.slider.round { + border-radius: 3.4rem; +} + +.slider.round:before { + border-radius: 50%; +} +``` + +[âŦ…ī¸ Go back to Home](./README.md) diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..2d1b886 --- /dev/null +++ b/flake.lock @@ -0,0 +1,94 @@ +{ + "nodes": { + "naersk": { + "inputs": { + "nixpkgs": "nixpkgs" + }, + "locked": { + "lastModified": 1694081375, + "narHash": "sha256-vzJXOUnmkMCm3xw8yfPP5m8kypQ3BhAIRe4RRCWpzy8=", + "owner": "nix-community", + "repo": "naersk", + "rev": "3f976d822b7b37fc6fb8e6f157c2dd05e7e94e89", + "type": "github" + }, + "original": { + "owner": "nix-community", + "ref": 
"master", + "repo": "naersk", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1695318763, + "narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=", + "path": "/nix/store/p7iz0r8gs6ppkhj83zjmwyd21k8b7v3y-source", + "rev": "e12483116b3b51a185a33a272bf351e357ba9a99", + "type": "path" + }, + "original": { + "id": "nixpkgs", + "type": "indirect" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1695318763, + "narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e12483116b3b51a185a33a272bf351e357ba9a99", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "naersk": "naersk", + "nixpkgs": "nixpkgs_2", + "utils": "utils" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + }, + "utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1694529238, + "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..6c30713 --- /dev/null +++ b/flake.nix @@ -0,0 +1,52 @@ +{ + # Websurfx NixOS flake + inputs = { + naersk.url = "github:nix-community/naersk/master"; + nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; + utils.url = "github:numtide/flake-utils"; + }; + + outputs = { + naersk, + nixpkgs, + self, + utils, + 
}: + # We do this for all systems - namely x86_64-linux, aarch64-linux, + # x86_64-darwin and aarch64-darwin + utils.lib.eachDefaultSystem (system: let + pkgs = import nixpkgs {inherit system;}; + naersk-lib = pkgs.callPackage naersk {}; + in rec { + # Build via "nix build .#default" + packages.default = naersk-lib.buildPackage { + # The build dependencies + buildInputs = with pkgs; [pkg-config openssl]; + src = ./.; + }; + + # Enter devshell with all the tools via "nix develop" + # or "nix-shell" + devShells.default = with pkgs; + mkShell { + buildInputs = [ + actionlint + cargo + haskellPackages.hadolint + nodePackages_latest.cspell + nodePackages_latest.eslint + nodePackages_latest.markdownlint-cli2 + nodePackages_latest.stylelint + redis + rustPackages.clippy + rustc + yamllint + ]; + RUST_SRC_PATH = rustPlatform.rustLibSrc; + }; + + # Build via "nix build .#websurfx", which is basically just + # calls the build function + packages.websurfx = packages.default; + }); +} diff --git a/images/create_fork_button.png b/images/create_fork_button.png deleted file mode 100644 index c060fd4..0000000 Binary files a/images/create_fork_button.png and /dev/null differ diff --git a/images/fork_button.png b/images/fork_button.png deleted file mode 100644 index f86ae80..0000000 Binary files a/images/fork_button.png and /dev/null differ diff --git a/images/fork_options_page.png b/images/fork_options_page.png deleted file mode 100644 index f8e9c24..0000000 Binary files a/images/fork_options_page.png and /dev/null differ diff --git a/public/images/barricade.png b/public/images/barricade.png new file mode 100644 index 0000000..eef097b Binary files /dev/null and b/public/images/barricade.png differ diff --git a/public/images/filter.png b/public/images/filter.png new file mode 100644 index 0000000..5325c27 Binary files /dev/null and b/public/images/filter.png differ diff --git a/public/images/info.svg b/public/images/info.svg new file mode 100644 index 0000000..0007caf --- /dev/null 
+++ b/public/images/info.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/no_results.gif b/public/images/no_results.gif new file mode 100644 index 0000000..1336f4c Binary files /dev/null and b/public/images/no_results.gif differ diff --git a/public/images/no_selection.png b/public/images/no_selection.png new file mode 100644 index 0000000..f646ad6 Binary files /dev/null and b/public/images/no_selection.png differ diff --git a/public/images/warning.svg b/public/images/warning.svg new file mode 100644 index 0000000..4139fb2 --- /dev/null +++ b/public/images/warning.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/websurfx_logo.png b/public/images/websurfx_logo.png new file mode 100644 index 0000000..9449e33 Binary files /dev/null and b/public/images/websurfx_logo.png differ diff --git a/public/static/colorschemes/catppuccin-mocha.css b/public/static/colorschemes/catppuccin-mocha.css index d2c1075..95f68b4 100644 --- a/public/static/colorschemes/catppuccin-mocha.css +++ b/public/static/colorschemes/catppuccin-mocha.css @@ -1,11 +1,11 @@ :root { - --bg: #1e1e2e; - --fg: #cdd6f4; - --1: #45475a; - --2: #f38ba8; - --3: #a6e3a1; - --4: #f9e2af; - --5: #89b4fa; - --6: #f5c2e7; - --7: #ffffff; + --background-color: #1e1e2e; + --foreground-color: #cdd6f4; + --color-one: #45475a; + --color-two: #f38ba8; + --color-three: #a6e3a1; + --color-four: #f9e2af; + --color-five: #89b4fa; + --color-six: #f5c2e7; + --color-seven: #ffffff; } diff --git a/public/static/colorschemes/dark-chocolate.css b/public/static/colorschemes/dark-chocolate.css new file mode 100644 index 0000000..f60d5ab --- /dev/null +++ b/public/static/colorschemes/dark-chocolate.css @@ -0,0 +1,11 @@ +:root { + --background-color: #000000; + --foreground-color: #ffffff; + --color-one: #121212; + --color-two: #808080; + --color-three: #999999; + --color-four: #666666; + --color-five: #bfbfbf; + --color-six: #e0e0e0; + --color-seven: #555555; +} diff --git 
a/public/static/colorschemes/dracula.css b/public/static/colorschemes/dracula.css index fb9c26c..71739ab 100644 --- a/public/static/colorschemes/dracula.css +++ b/public/static/colorschemes/dracula.css @@ -1,11 +1,11 @@ :root { - --bg: #44475a; - --fg: #8be9fd; - --1: #ff5555; - --2: #50fa7b; - --3: #ffb86c; - --4: #bd93f9; - --5: #ff79c6; - --6: #94a3a5; - --7: #ffffff; + --background-color: #44475a; + --foreground-color: #8be9fd; + --color-one: #ff5555; + --color-two: #50fa7b; + --color-three: #ffb86c; + --color-four: #bd93f9; + --color-five: #ff79c6; + --color-six: #94a3a5; + --color-seven: #ffffff; } diff --git a/public/static/colorschemes/gruvbox-dark.css b/public/static/colorschemes/gruvbox-dark.css index 0b022a5..69f81d6 100644 --- a/public/static/colorschemes/gruvbox-dark.css +++ b/public/static/colorschemes/gruvbox-dark.css @@ -1,11 +1,11 @@ :root { - --bg: #282828; - --fg: #ebdbb2; - --1: #cc241d; - --2: #98971a; - --3: #d79921; - --4: #458588; - --5: #b16286; - --6: #689d6a; - --7: #ffffff; + --background-color: #1d2021; + --foreground-color: #ebdbb2; + --color-one: #282828; + --color-two: #98971a; + --color-three: #d79921; + --color-four: #458588; + --color-five: #b16286; + --color-six: #689d6a; + --color-seven: #ffffff; } diff --git a/public/static/colorschemes/monokai.css b/public/static/colorschemes/monokai.css index 2c7b738..7c33160 100644 --- a/public/static/colorschemes/monokai.css +++ b/public/static/colorschemes/monokai.css @@ -1,11 +1,11 @@ :root { - --bg: #403e41; - --fg: #fcfcfa; - --1: #ff6188; - --2: #a9dc76; - --3: #ffd866; - --4: #fc9867; - --5: #ab9df2; - --6: #78dce8; - --7: #ffffff; + --background-color: #49483Eff; + --foreground-color: #FFB269; + --color-one: #272822ff; + --color-two: #61AFEF; + --color-three: #ffd866; + --color-four: #fc9867; + --color-five: #ab9df2; + --color-six: #78dce8; + --color-seven: #ffffff; } diff --git a/public/static/colorschemes/nord.css b/public/static/colorschemes/nord.css index cc0793f..234b57b 100644 
--- a/public/static/colorschemes/nord.css +++ b/public/static/colorschemes/nord.css @@ -1,11 +1,11 @@ :root { - --bg: #2e3440; - --fg: #d8dee9; - --1: #3b4252; - --2: #bf616a; - --3: #a3be8c; - --4: #ebcb8b; - --5: #81a1c1; - --6: #b48ead; - --7: #ffffff; + --background-color: #122736ff; + --foreground-color: #a2e2a9; + --color-one: #121B2Cff; + --color-two: #f08282; + --color-three: #ABC5AAff; + --color-four: #e6d2d2; + --color-five: #81a1c1; + --color-six: #e2ecd6; + --color-seven: #fff; } diff --git a/public/static/colorschemes/oceanic-next.css b/public/static/colorschemes/oceanic-next.css index e7e25b7..896bae1 100644 --- a/public/static/colorschemes/oceanic-next.css +++ b/public/static/colorschemes/oceanic-next.css @@ -1,11 +1,11 @@ :root { - --bg: #1b2b34; - --fg: #d8dee9; - --1: #343d46; - --2: #ec5f67; - --3: #99c794; - --4: #fac863; - --5: #6699cc; - --6: #c594c5; - --7: #ffffff; + --background-color: #1b2b34; + --foreground-color: #d8dee9; + --color-one: #343d46; + --color-two: #5FB3B3ff; + --color-three: #69Cf; + --color-four: #99c794; + --color-five: #69c; + --color-six: #c594c5; + --color-seven: #D8DEE9ff; } diff --git a/public/static/colorschemes/one-dark.css b/public/static/colorschemes/one-dark.css new file mode 100644 index 0000000..30f858e --- /dev/null +++ b/public/static/colorschemes/one-dark.css @@ -0,0 +1,11 @@ +:root { + --background-color: #282c34; + --foreground-color: #abb2bf; + --color-one: #3b4048; + --color-two: #a3be8c; + --color-three: #b48ead; + --color-four: #c8ccd4; + --color-five: #e06c75; + --color-six: #61afef; + --color-seven: #be5046; +} diff --git a/public/static/colorschemes/solarized-dark.css b/public/static/colorschemes/solarized-dark.css index 1cad24f..44494f9 100644 --- a/public/static/colorschemes/solarized-dark.css +++ b/public/static/colorschemes/solarized-dark.css @@ -1,11 +1,11 @@ :root { - --bg: #002b36; - --fg: #839496; - --1: #073642; - --2: #dc322f; - --3: #859900; - --4: #b58900; - --5: #268bd2; - --6: #d33682; 
- --7: #ffffff; + --background-color: #002b36; + --foreground-color: #c9e0e6; + --color-one: #073642; + --color-two: #2AA198ff; + --color-three: #2AA198ff; + --color-four: #EEE8D5ff; + --color-five: #268bd2; + --color-six: #d33682; + --color-seven: #fff; } diff --git a/public/static/colorschemes/solarized-light.css b/public/static/colorschemes/solarized-light.css index c6de267..7434b37 100644 --- a/public/static/colorschemes/solarized-light.css +++ b/public/static/colorschemes/solarized-light.css @@ -1,11 +1,11 @@ :root { - --bg: #fdf6e3; - --fg: #657b83; - --1: #073642; - --2: #dc322f; - --3: #859900; - --4: #b58900; - --5: #268bd2; - --6: #d33682; - --7: #ffffff; + --background-color: #EEE8D5ff; + --foreground-color: #b1ab97; + --color-one: #fdf6e3; + --color-two: #DC322Fff; + --color-three: #586E75ff; + --color-four: #b58900; + --color-five: #268bd2; + --color-six: #d33682; + --color-seven: #fff; } diff --git a/public/static/colorschemes/tokyo-night.css b/public/static/colorschemes/tokyo-night.css new file mode 100644 index 0000000..16c54bd --- /dev/null +++ b/public/static/colorschemes/tokyo-night.css @@ -0,0 +1,11 @@ +:root { + --background-color: #1a1b26; + --foreground-color: #c0caf5; + --color-one: #32364a; + --color-two: #a9b1d6; + --color-three: #5a5bb8; + --color-four: #6b7089; + --color-five: #e2afff; + --color-six: #a9a1e1; + --color-seven: #988bc7; +} diff --git a/public/static/colorschemes/tomorrow-night.css b/public/static/colorschemes/tomorrow-night.css index 05502bc..2f2c29c 100644 --- a/public/static/colorschemes/tomorrow-night.css +++ b/public/static/colorschemes/tomorrow-night.css @@ -1,11 +1,11 @@ :root { - --bg: #1d1f21; - --fg: #c5c8c6; - --1: #cc6666; - --2: #b5bd68; - --3: #f0c674; - --4: #81a2be; - --5: #b294bb; - --6: #8abeb7; - --7: #ffffff; + --background-color: #35383Cff; + --foreground-color: #D7DAD8ff; + --color-one: #1d1f21; + --color-two: #D77C79ff; + --color-three: #f0c674; + --color-four: #92B2CAff; + --color-five: #C0A7C7ff; + 
--color-six: #9AC9C4ff; + --color-seven: #fff; } diff --git a/public/static/cookies.js b/public/static/cookies.js new file mode 100644 index 0000000..6b55e02 --- /dev/null +++ b/public/static/cookies.js @@ -0,0 +1,30 @@ +/** + * This function is executed when any page on the website finishes loading and + * this function retrieves the cookies if it is present on the user's machine. + * If it is available then the saved cookies is display in the cookies tab + * otherwise an appropriate message is displayed if it is not available. + * + * @function + * @listens DOMContentLoaded + * @returns {void} + */ +document.addEventListener( + 'DOMContentLoaded', + () => { + try { + // Decode the cookie value + let cookie = decodeURIComponent(document.cookie) + // Set the value of the input field to the decoded cookie value if it is not empty + // Otherwise, display a message indicating that no cookies have been saved on the user's system + document.querySelector('.cookies input').value = cookie.length + ? cookie + : 'No cookies have been saved on your system' + } catch (error) { + // If there is an error decoding the cookie, log the error to the console + // and display an error message in the input field + console.error('Error decoding cookie:', error) + document.querySelector('.cookies input').value = 'Error decoding cookie' + } + }, + false, +) diff --git a/public/static/error_box.js b/public/static/error_box.js new file mode 100644 index 0000000..1e2e874 --- /dev/null +++ b/public/static/error_box.js @@ -0,0 +1,7 @@ +/** + * This function provides the ability for the button to toggle the dropdown error-box + * in the search page. 
+ */ +function toggleErrorBox() { + document.querySelector('.dropdown_error_box').classList.toggle('show') +} diff --git a/public/static/index.js b/public/static/index.js index 1261e15..515065a 100644 --- a/public/static/index.js +++ b/public/static/index.js @@ -1,10 +1,34 @@ -let search_box = document.querySelector('input') -function search_web() { - window.location = `search?q=${search_box.value}` +/** + * Selects the input element for the search box + * @type {HTMLInputElement} + */ +const searchBox = document.querySelector('input') + +/** + * Redirects the user to the search results page with the query parameter + */ +function searchWeb() { + const query = searchBox.value.trim() + try { + let safeSearchLevel = document.querySelector('.search_options select').value + if (query) { + window.location.href = `search?q=${encodeURIComponent( + query, + )}&safesearch=${encodeURIComponent(safeSearchLevel)}` + } + } catch (error) { + if (query) { + window.location.href = `search?q=${encodeURIComponent(query)}` + } + } } -search_box.addEventListener('keyup', (e) => { - if (e.keyCode === 13) { - search_web() - } +/** + * Listens for the 'Enter' key press event on the search box and calls the searchWeb function + * @param {KeyboardEvent} e - The keyboard event object + */ +searchBox.addEventListener('keyup', (e) => { + if (e.key === 'Enter') { + searchWeb() + } }) diff --git a/public/static/pagination.js b/public/static/pagination.js index 12c568f..bdbfb39 100644 --- a/public/static/pagination.js +++ b/public/static/pagination.js @@ -1,26 +1,39 @@ +/** + * Navigates to the next page by incrementing the current page number in the URL query string. 
+ * @returns {void} + */ function navigate_forward() { - const url = new URL(window.location) - const searchParams = url.searchParams + let url = new URL(window.location); + let searchParams = url.searchParams; - let q = searchParams.get('q') - let page = searchParams.get('page') + let q = searchParams.get('q'); + let page = parseInt(searchParams.get('page')); - if (page === null) { - page = 2 - window.location = `${url.origin}${url.pathname}?q=${q}&page=${page}` + if (isNaN(page)) { + page = 1; } else { - window.location = `${url.origin}${url.pathname}?q=${q}&page=${++page}` + page++; } + + window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`; } +/** + * Navigates to the previous page by decrementing the current page number in the URL query string. + * @returns {void} + */ function navigate_backward() { - const url = new URL(window.location) - const searchParams = url.searchParams + let url = new URL(window.location); + let searchParams = url.searchParams; - let q = searchParams.get('q') - let page = searchParams.get('page') + let q = searchParams.get('q'); + let page = parseInt(searchParams.get('page')); - if (page !== null && page > 1) { - window.location = `${url.origin}${url.pathname}?q=${q}&page=${--page}` + if (isNaN(page)) { + page = 0; + } else if (page > 0) { + page--; } + + window.location.href = `${url.origin}${url.pathname}?q=${encodeURIComponent(q)}&page=${page}`; } diff --git a/public/static/search_area_options.js b/public/static/search_area_options.js new file mode 100644 index 0000000..10e0390 --- /dev/null +++ b/public/static/search_area_options.js @@ -0,0 +1,18 @@ +document.addEventListener( + 'DOMContentLoaded', + () => { + let url = new URL(window.location) + let searchParams = url.searchParams + + let safeSearchLevel = searchParams.get('safesearch') + + if ( + safeSearchLevel >= 0 && + safeSearchLevel <= 2 && + safeSearchLevel !== null + ) { + document.querySelector('.search_options select').value = 
safeSearchLevel + } + }, + false, +) diff --git a/public/static/settings.js b/public/static/settings.js new file mode 100644 index 0000000..fc0e118 --- /dev/null +++ b/public/static/settings.js @@ -0,0 +1,115 @@ +/** + * This function handles the toggling of selections of all upstream search engines + * options in the settings page under the tab engines. + */ +function toggleAllSelection() { + document + .querySelectorAll('.engine') + .forEach( + (engine_checkbox) => + (engine_checkbox.checked = + document.querySelector('.select_all').checked), + ) +} + +/** + * This function adds the functionality to sidebar buttons to only show settings + * related to that tab. + * @param {HTMLElement} current_tab - The current tab that was clicked. + */ +function setActiveTab(current_tab) { + // Remove the active class from all tabs and buttons + document + .querySelectorAll('.tab') + .forEach((tab) => tab.classList.remove('active')) + document + .querySelectorAll('.btn') + .forEach((tab) => tab.classList.remove('active')) + + // Add the active class to the current tab and its corresponding settings + current_tab.classList.add('active') + document + .querySelector(`.${current_tab.innerText.toLowerCase().replace(' ', '_')}`) + .classList.add('active') +} + +/** + * This function adds the functionality to save all the user selected preferences + * to be saved in a cookie on the users machine. 
+ */ +function setClientSettings() { + // Create an object to store the user's preferences + let cookie_dictionary = new Object() + + // Loop through all select tags and add their values to the cookie dictionary + document.querySelectorAll('select').forEach((select_tag) => { + switch (select_tag.name) { + case 'themes': + cookie_dictionary['theme'] = select_tag.value + break + case 'colorschemes': + cookie_dictionary['colorscheme'] = select_tag.value + break + case 'safe_search_levels': + cookie_dictionary['safe_search_level'] = Number(select_tag.value) + break + } + }) + + // Loop through all engine checkboxes and add their values to the cookie dictionary + let engines = [] + + document.querySelectorAll('.engine').forEach((engine_checkbox) => { + if (engine_checkbox.checked) { + engines.push(engine_checkbox.parentNode.parentNode.innerText.trim()) + } + }) + + cookie_dictionary['engines'] = engines + + // Set the expiration date for the cookie to 1 year from the current date + let expiration_date = new Date() + expiration_date.setFullYear(expiration_date.getFullYear() + 1) + + // Save the cookie to the user's machine + document.cookie = `appCookie=${JSON.stringify( + cookie_dictionary, + )}; expires=${expiration_date.toUTCString()}` + + // Display a success message to the user + document.querySelector('.message').innerText = + '✅ The settings have been saved sucessfully!!' + + // Clear the success message after 10 seconds + setTimeout(() => { + document.querySelector('.message').innerText = '' + }, 10000) +} + +/** + * This functions gets the saved cookies if it is present on the user's machine If it + * is available then it is parsed and converted to an object which is then used to + * retrieve the preferences that the user had selected previously and is then loaded in the + * website otherwise the function does nothing and the default server side settings are loaded. 
+ */ +function getClientSettings() { + // Get the appCookie from the user's machine + let cookie = decodeURIComponent(document.cookie) + + // If the cookie is not empty, parse it and use it to set the user's preferences + if (cookie.length) { + let cookie_value = cookie + .split(';') + .map((item) => item.split('=')) + .reduce((acc, [_, v]) => (acc = JSON.parse(v)) && acc, {}) + + // Loop through all link tags and update their href values to match the user's preferences + Array.from(document.querySelectorAll('link')).forEach((item) => { + if (item.href.includes('static/themes')) { + item.href = `static/themes/${cookie_value['theme']}.css` + } else if (item.href.includes('static/colorschemes')) { + item.href = `static/colorschemes/${cookie_value['colorscheme']}.css` + } + }) + } +} diff --git a/public/static/themes/simple.css b/public/static/themes/simple.css index 97643e8..62ff0ea 100644 --- a/public/static/themes/simple.css +++ b/public/static/themes/simple.css @@ -16,7 +16,7 @@ body { justify-content: space-between; align-items: center; height: 100vh; - background: var(--1); + background: var(--color-one); } /* styles for the index page */ @@ -46,7 +46,7 @@ body { outline: none; border: none; box-shadow: rgba(0, 0, 0, 1); - background: var(--fg); + background: var(--foreground-color); } .search_bar button { @@ -59,8 +59,8 @@ body { outline: none; border: none; gap: 0; - background: var(--bg); - color: var(--3); + background: var(--background-color); + color: var(--color-three); font-weight: 600; letter-spacing: 0.1rem; } @@ -70,15 +70,145 @@ body { filter: brightness(1.2); } +.search_area .search_options { + display: flex; + justify-content: space-between; + align-items: center; +} + +.search_area .search_options select { + margin: 0.7rem 0; + width: 20rem; + background-color: var(--color-one); + color: var(--foreground-color); + padding: 1rem 2rem; + border-radius: 0.5rem; + outline: none; + border: none; + text-transform: capitalize; +} + +.search_area 
.search_options option:hover { + background-color: var(--color-one); +} + +.result_not_found { + display: flex; + flex-direction: column; + font-size: 1.5rem; + color: var(--foreground-color); +} + +.result_not_found p { + margin: 1rem 0; +} + +.result_not_found ul { + margin: 1rem 0; +} + +.result_not_found img { + width: 40rem; +} + +/* styles for the error box */ +.error_box .error_box_toggle_button { + background: var(--foreground-color); +} + +.error_box .dropdown_error_box { + position: absolute; + display: none; + flex-direction: column; + background: var(--background-color); + border-radius: 0; + margin-left: 2rem; + min-height: 20rem; + min-width: 22rem; +} +.error_box .dropdown_error_box.show { + display: flex; +} +.error_box .dropdown_error_box .error_item, +.error_box .dropdown_error_box .no_errors { + display: flex; + align-items: center; + color: var(--foreground-color); + letter-spacing: 0.1rem; + padding: 1rem; + font-size: 1.2rem; +} +.error_box .dropdown_error_box .error_item { + justify-content: space-between; +} +.error_box .dropdown_error_box .no_errors { + min-height: 18rem; + justify-content: center; +} + +.error_box .dropdown_error_box .error_item:hover { + box-shadow: inset 0 0 100px 100px rgba(255, 255, 255, 0.1); +} + +.error_box .error_item .severity_color { + width: 1.2rem; + height: 1.2rem; +} +.results .result_disallowed, +.results .result_filtered, +.results .result_engine_not_selected { + display: flex; + justify-content: center; + align-items: center; + gap: 10rem; + font-size: 2rem; + color: var(--foreground-color); + margin: 0rem 7rem; +} + +.results .result_disallowed .user_query, +.results .result_filtered .user_query, +.results .result_engine_not_selected .user_query { + color: var(--background-color); + font-weight: 300; +} + +.results .result_disallowed img, +.results .result_filtered img, +.results .result_engine_not_selected img { + width: 30rem; +} + +.results .result_disallowed div, +.results .result_filtered div, 
+.results .result_engine_not_selected div { + display: flex; + flex-direction: column; + gap: 1rem; + line-break: strict; +} + /* styles for the footer and header */ -header { - background: var(--bg); +header, +footer { width: 100%; + background: var(--background-color); display: flex; - justify-content: right; - align-items: center; padding: 1rem; + align-items: center; +} + +header { + justify-content: space-between; +} + +header h1 a { + text-transform: capitalize; + text-decoration: none; + color: var(--foreground-color); + letter-spacing: 0.1rem; + margin-left: 1rem; } header ul, @@ -96,7 +226,7 @@ footer ul li a, header ul li a:visited, footer ul li a:visited { text-decoration: none; - color: var(--2); + color: var(--color-two); text-transform: capitalize; letter-spacing: 0.1rem; } @@ -107,12 +237,12 @@ header ul li a { header ul li a:hover, footer ul li a:hover { - color: var(--5); + color: var(--color-five); } footer div span { font-size: 1.5rem; - color: var(--4); + color: var(--color-four); } footer div { @@ -121,13 +251,8 @@ footer div { } footer { - background: var(--bg); - width: 100%; - padding: 1rem; - display: flex; flex-direction: column; justify-content: center; - align-items: center; } /* Styles for the search page */ @@ -158,28 +283,28 @@ footer { .results_aggregated .result h1 a { font-size: 1.5rem; - color: var(--2); + color: var(--color-two); text-decoration: none; letter-spacing: 0.1rem; } .results_aggregated .result h1 a:hover { - color: var(--5); + color: var(--color-five); } .results_aggregated .result h1 a:visited { - color: var(--bg); + color: var(--background-color); } .results_aggregated .result small { - color: var(--3); + color: var(--color-three); font-size: 1.1rem; word-wrap: break-word; line-break: anywhere; } .results_aggregated .result p { - color: var(--fg); + color: var(--foreground-color); font-size: 1.2rem; margin-top: 0.3rem; word-wrap: break-word; @@ -190,7 +315,7 @@ footer { text-align: right; font-size: 1.2rem; padding: 
1rem; - color: var(--5); + color: var(--color-five); } /* Styles for the 404 page */ @@ -233,12 +358,12 @@ footer { .error_content p a, .error_content p a:visited { - color: var(--2); + color: var(--color-two); text-decoration: none; } .error_content p a:hover { - color: var(--5); + color: var(--color-five); } .page_navigation { @@ -249,8 +374,8 @@ footer { } .page_navigation button { - background: var(--bg); - color: var(--fg); + background: var(--background-color); + color: var(--foreground-color); padding: 1rem; border-radius: 0.5rem; outline: none; @@ -260,3 +385,248 @@ footer { .page_navigation button:active { filter: brightness(1.2); } + +/* Styles for the about page */ + +.about-container article { + font-size: 1.5rem; + color: var(--foreground-color); + padding-bottom: 10px; +} + +.about-container article h1 { + color: var(--color-two); + font-size: 2.8rem; +} + +.about-container article div { + padding-bottom: 15px; +} + +.about-container a { + color: var(--color-three); +} + +.about-container article h2 { + color: var(--color-three); + font-size: 1.8rem; + padding-bottom: 10px; +} + +.about-container p { + color: var(--foreground-color); + font-size: 1.6rem; + padding-bottom: 10px; +} + +.about-container h3 { + font-size: 1.5rem; +} + +.about-container { + width: 80%; +} + +/* Styles for the settings page */ +.settings_container { + display: flex; + justify-content: space-around; + width: 80dvw; +} + +.settings h1 { + color: var(--color-two); + font-size: 2.5rem; +} + +.settings hr { + border-color: var(--color-three); + margin: 0.3rem 0 1rem 0; +} + +.settings_container .sidebar { + width: 30%; + cursor: pointer; + font-size: 2rem; + display: flex; + flex-direction: column; + margin-right: 0.5rem; + margin-left: -0.7rem; + padding: 0.7rem; + border-radius: 5px; + font-weight: bold; + margin-bottom: 0.5rem; + color: var(--foreground-color); + text-transform: capitalize; + gap: 1.5rem; +} + +.settings_container .sidebar .btn { + padding: 0.5rem; + 
border-radius: 0.5rem; +} + +.settings_container .sidebar .btn.active { + background-color: var(--color-two); +} + +.settings_container .main_container { + width: 70%; + border-left: 1.5px solid var(--color-three); + padding-left: 3rem; +} + +.settings_container .tab { + display: none; +} + +.settings_container .tab.active { + display: flex; + flex-direction: column; + justify-content: space-around; +} + +.settings_container button { + margin-top: 1rem; + padding: 1rem 2rem; + font-size: 1.5rem; + background: var(--color-three); + color: var(--background-color); + border-radius: 0.5rem; + border: 2px solid transparent; + font-weight: bold; + transition: all 0.1s ease-out; + cursor: pointer; + box-shadow: 5px 5px; + outline: none; +} + +.settings_container button:active { + box-shadow: none; + translate: 5px 5px; +} + +.settings_container .main_container .message { + font-size: 1.5rem; + color: var(--foreground-color); +} + +.settings_container .tab h3 { + font-size: 2rem; + font-weight: bold; + color: var(--color-four); + margin-top: 1.5rem; + text-transform: capitalize; +} + +.settings_container .tab .description { + font-size: 1.5rem; + margin-bottom: 0.5rem; + color: var(--foreground-color); +} + +.settings_container .user_interface select, +.settings_container .general select { + margin: 0.7rem 0; + width: 20rem; + background-color: var(--background-color); + color: var(--foreground-color); + padding: 1rem 2rem; + border-radius: 0.5rem; + outline: none; + border: none; + text-transform: capitalize; +} + +.settings_container .user_interface option:hover, +.settings_container .general option:hover { + background-color: var(--color-one); +} + +.settings_container .engines .engine_selection { + display: flex; + flex-direction: column; + justify-content: center; + gap: 1rem; + padding: 1rem 0; +} + +.settings_container .engines .toggle_btn { + color: var(--foreground-color); + font-size: 1.5rem; + display: flex; + gap: 0.5rem; + align-items: center; +} + 
+.settings_container .engines hr { + margin: 0; +} + +.settings_container .cookies input { + margin: 1rem 0rem; +} + +/* Styles for the toggle button */ +/* The switch - the box around the slider */ +.switch { + position: relative; + display: inline-block; + width: 6rem; + height: 3.4rem; +} + +/* Hide default HTML checkbox */ +.switch input { + opacity: 0; + width: 0; + height: 0; +} + +/* The slider */ +.slider { + position: absolute; + cursor: pointer; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: var(--background-color); + -webkit-transition: 0.4s; + transition: 0.4s; +} + +.slider:before { + position: absolute; + content: ''; + height: 2.6rem; + width: 2.6rem; + left: 0.4rem; + bottom: 0.4rem; + background-color: var(--foreground-color); + -webkit-transition: 0.4s; + transition: 0.4s; +} + +input:checked + .slider { + background-color: var(--color-three); +} + +input:focus + .slider { + box-shadow: 0 0 1px var(--color-three); +} + +input:checked + .slider:before { + -webkit-transform: translateX(2.6rem); + -ms-transform: translateX(2.6rem); + transform: translateX(2.6rem); +} + +/* Rounded sliders */ +.slider.round { + border-radius: 3.4rem; +} + +.slider.round:before { + border-radius: 50%; +} diff --git a/public/templates/about.html b/public/templates/about.html index 56c2165..9c4cbb0 100644 --- a/public/templates/about.html +++ b/public/templates/about.html @@ -1,20 +1,29 @@ {{>header this}}
-

Websurfx

- a lightening fast, privacy respecting, secure meta search engine -
- Lorem ipsum dolor sit amet, officia excepteur ex fugiat reprehenderit enim - labore culpa sint ad nisi Lorem pariatur mollit ex esse exercitation amet. - Nisi anim cupidatat excepteur officia. Reprehenderit nostrud nostrud ipsum - Lorem est aliquip amet voluptate voluptate dolor minim nulla est proident. - Nostrud officia pariatur ut officia. Sit irure elit esse ea nulla sunt ex - occaecat reprehenderit commodo officia dolor Lorem duis laboris cupidatat - officia voluptate. Culpa proident adipisicing id nulla nisi laboris ex in - Lorem sunt duis officia eiusmod. Aliqua reprehenderit commodo ex non - excepteur duis sunt velit enim. Voluptate laboris sint cupidatat ullamco ut - ea consectetur et est culpa et culpa duis. +
+
+

Websurfx

+
+
+

A modern-looking, lightning-fast, privacy-respecting, secure meta search engine written in Rust. It provides a fast and secure search experience while respecting user privacy.
It aggregates results from multiple search engines and presents them in an unbiased manner, filtering out trackers and ads. +

+ +

Some of the Top Features:

+ +
    Lightning fast - Results load within milliseconds for an instant search experience.
+ +
    Secure search - All searches are performed over an encrypted connection to prevent snooping.
+ +
    Ad free results - All search results are ad free and clutter free for a clean search experience.
+ +
    Privacy focused - Websurfx does not track, store or sell your search data. Your privacy is our priority.
+ +
    Free and Open source - The entire project's code is open source and available for free on GitHub under a GNU Affero General Public License.
+ +
    Highly customizable - Websurfx comes with 9 built-in color themes and supports creating custom themes effortlessly.
+ +

Developed by: Websurfx team

{{>footer}} + diff --git a/public/templates/bar.html b/public/templates/bar.html new file mode 100644 index 0000000..489b075 --- /dev/null +++ b/public/templates/bar.html @@ -0,0 +1,3 @@ + + diff --git a/public/templates/general_tab.html b/public/templates/general_tab.html new file mode 100644 index 0000000..92fc592 --- /dev/null +++ b/public/templates/general_tab.html @@ -0,0 +1,13 @@ +
+

General

+

Select a safe search level

+

+ Select a safe search level from the menu below to filter content based on + the level. +

+ +
diff --git a/public/templates/header.html b/public/templates/header.html index 92053b9..4e8fec0 100644 --- a/public/templates/header.html +++ b/public/templates/header.html @@ -1,12 +1,16 @@ - + - + + Websurfx - + - -
{{>navbar}}
+ +
+

Websurfx

+ {{>navbar}} +
diff --git a/public/templates/index.html b/public/templates/index.html index 64021e9..87a5449 100644 --- a/public/templates/index.html +++ b/public/templates/index.html @@ -1,7 +1,8 @@ {{>header this}}
- Websurfx meta-search engine logo - {{>search_bar}} + Websurfx meta-search engine logo + {{>bar}} +
{{>footer}} diff --git a/public/templates/navbar.html b/public/templates/navbar.html index f5f581f..c369739 100644 --- a/public/templates/navbar.html +++ b/public/templates/navbar.html @@ -1,6 +1,6 @@ diff --git a/public/templates/search.html b/public/templates/search.html index e1f952b..c6c9d6a 100644 --- a/public/templates/search.html +++ b/public/templates/search.html @@ -1,25 +1,86 @@ {{>header this.style}}
- {{>search_bar}} -
- {{#each results}} -
-

{{{this.title}}}

- {{this.url}} -

{{{this.description}}}

-
- {{#each engine}} - {{this}} - {{/each}} -
+ {{>search_bar this}} +
+ {{#if results}} {{#each results}} +
+

{{{this.title}}}

+ {{{this.url}}} +

{{{this.description}}}

+
+ {{#each engine}} + {{{this}}} + {{/each}} +
+
+ {{/each}} {{else}} {{#if disallowed}} +
+
+

+ Your search - {{{this.pageQuery}}} - + has been disallowed. +

+

Dear user,

+

+ The query - {{{this.pageQuery}}} - has + been blacklisted via server configuration and hence disallowed by the + server. Henceforth no results could be displayed for your query. +

+
+ Image of a Barricade +
+ {{else}} {{#if filtered}} +
+
+

+ Your search - {{{this.pageQuery}}} - + has been filtered. +

+

Dear user,

+

+ All the search results contain results that has been configured to be + filtered out via server configuration and henceforth has been + completely filtered out. +

+
+ Image of a paper inside a funnel +
+ {{else}} {{#if noEnginesSelected}} +
+
+

+ No results could be fetched for your search "{{{this.pageQuery}}}" . +

+

Dear user,

+

+ No results could be retrieved from the upstream search engines as no + upstream search engines were selected from the settings page. +

+
+ Image of a white cross inside a red circle +
+ {{else}} +
+

Your search - {{{this.pageQuery}}} - did not match any documents.

+

Suggestions:

+
    +
  • Make sure that all words are spelled correctly.
  • +
  • Try different keywords.
  • +
  • Try more general keywords.
  • +
+ Man fishing gif +
+ {{/if}} {{/if}} {{/if}} {{/if}} +
+ - {{/each}} -
-
+ + {{>footer}} diff --git a/public/templates/search_bar.html b/public/templates/search_bar.html index a013209..a006d89 100644 --- a/public/templates/search_bar.html +++ b/public/templates/search_bar.html @@ -1,9 +1,36 @@ - diff --git a/public/templates/settings.html b/public/templates/settings.html index 4215d08..3c97213 100644 --- a/public/templates/settings.html +++ b/public/templates/settings.html @@ -1,5 +1,22 @@ {{>header this}} -
-

Page is under construction

+
+

Settings

+
+
+ +
+ {{> general_tab}} {{> user_interface_tab}} {{> engines_tab}} {{> + cookies_tab}} +

+ +
+
+ + {{>footer}} diff --git a/public/templates/user_interface_tab.html b/public/templates/user_interface_tab.html new file mode 100644 index 0000000..7de0f06 --- /dev/null +++ b/public/templates/user_interface_tab.html @@ -0,0 +1,28 @@ +
+

User Interface

+

select theme

+

+ Select the theme from the available themes to be used in user interface +

+ +

select color scheme

+

+ Select the color scheme for your theme to be used in user interface +

+ +
diff --git a/src/bin/websurfx.rs b/src/bin/websurfx.rs index fa21486..d80c8e0 100644 --- a/src/bin/websurfx.rs +++ b/src/bin/websurfx.rs @@ -3,10 +3,18 @@ //! This module contains the main function which handles the logging of the application to the //! stdout and handles the command line arguments provided and launches the `websurfx` server. +use mimalloc::MiMalloc; use std::net::TcpListener; +use websurfx::{cache::cacher::Cache, config::parser::Config, run}; -use env_logger::Env; -use websurfx::{config_parser::parser::Config, run}; +/// A dhat heap memory profiler +#[cfg(feature = "dhat-heap")] +#[global_allocator] +static ALLOC: dhat::Alloc = dhat::Alloc; + +#[cfg(not(feature = "dhat-heap"))] +#[global_allocator] +static GLOBAL: MiMalloc = MiMalloc; /// The function that launches the main server and registers all the routes of the website. /// @@ -16,15 +24,27 @@ use websurfx::{config_parser::parser::Config, run}; /// available for being used for other applications. #[actix_web::main] async fn main() -> std::io::Result<()> { + // A dhat heap profiler initialization. + #[cfg(feature = "dhat-heap")] + let _profiler = dhat::Profiler::new_heap(); + // Initialize the parsed config file. - let config = Config::parse().unwrap(); + let config = Config::parse(false).unwrap(); - // Initializing logging middleware with level set to default or info. 
- env_logger::Builder::from_env(Env::default().default_filter_or("info")).init(); + let cache = Cache::build(&config).await; - log::info!("started server on port {}", config.port); + log::info!( + "started server on port {} and IP {}", + config.port, + config.binding_ip + ); + log::info!( + "Open http://{}:{}/ in your browser", + config.binding_ip, + config.port, + ); - let listener = TcpListener::bind((config.binding_ip_addr.clone(), config.port))?; + let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?; - run(listener, config)?.await + run(listener, config, cache)?.await } diff --git a/src/cache/cacher.rs b/src/cache/cacher.rs index 54d9a48..12f88ff 100644 --- a/src/cache/cacher.rs +++ b/src/cache/cacher.rs @@ -1,78 +1,267 @@ //! This module provides the functionality to cache the aggregated results fetched and aggregated //! from the upstream search engines in a json format. -use md5::compute; -use redis::{Client, Commands, Connection}; +use error_stack::Report; +#[cfg(feature = "memory-cache")] +use mini_moka::sync::Cache as MokaCache; +#[cfg(feature = "memory-cache")] +use std::time::Duration; +use tokio::sync::Mutex; -/// A named struct which stores the redis Connection url address to which the client will -/// connect to. -/// -/// # Fields -/// -/// * `redis_connection_url` - It stores the redis Connection url address. +use crate::{config::parser::Config, models::aggregation_models::SearchResults}; + +use super::error::CacheError; +#[cfg(feature = "redis-cache")] +use super::redis_cacher::RedisCache; + +/// Different implementations for caching, currently it is possible to cache in-memory or in Redis. 
#[derive(Clone)] -pub struct RedisCache { - redis_connection_url: String, +pub enum Cache { + /// Caching is disabled + Disabled, + #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))] + /// Encapsulates the Redis based cache + Redis(RedisCache), + #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + /// Contains the in-memory cache. + InMemory(MokaCache), + #[cfg(all(feature = "redis-cache", feature = "memory-cache"))] + /// Contains both the in-memory cache and Redis based cache + Hybrid(RedisCache, MokaCache), } -impl RedisCache { - /// Constructs a new `SearchResult` with the given arguments needed for the struct. +impl Cache { + /// A function that builds the cache from the given configuration. /// /// # Arguments /// - /// * `redis_connection_url` - It stores the redis Connection url address. - pub fn new(redis_connection_url: String) -> Self { - RedisCache { - redis_connection_url, + /// * `config` - It takes the config struct as an argument. + /// + /// # Returns + /// + /// It returns a newly initialized variant based on the feature enabled by the user. 
+ pub async fn build(_config: &Config) -> Self { + #[cfg(all(feature = "redis-cache", feature = "memory-cache"))] + { + log::info!("Using a hybrid cache"); + Cache::new_hybrid( + RedisCache::new(&_config.redis_url, 5) + .await + .expect("Redis cache configured"), + ) + } + #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))] + { + log::info!("Listening redis server on {}", &_config.redis_url); + Cache::new( + RedisCache::new(&_config.redis_url, 5) + .await + .expect("Redis cache configured"), + ) + } + #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + { + log::info!("Using an in-memory cache"); + Cache::new_in_memory() + } + #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))] + { + log::info!("Caching is disabled"); + Cache::Disabled } } - /// A helper function which computes the hash of the url and formats and returns it as string. + /// A function that initializes a new connection pool struct. /// /// # Arguments /// - /// * `url` - It takes an url as string. - fn compute_url_hash(self, url: &str) -> String { - format!("{:?}", compute(url)) + /// * `redis_cache` - It takes the newly initialized connection pool struct as an argument. + /// + /// # Returns + /// + /// It returns a `Redis` variant with the newly initialized connection pool struct. + #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))] + pub fn new(redis_cache: RedisCache) -> Self { + Cache::Redis(redis_cache) } - /// A function which fetches the cached json results as json string from the redis server. + /// A function that initializes the `in memory` cache which is used to cache the results in + /// memory with the search engine thus improving performance by making retrieval and caching of + /// results faster. + /// + /// # Returns + /// + /// It returns a `InMemory` variant with the newly initialized in memory cache type. 
+ #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + pub fn new_in_memory() -> Self { + let cache = MokaCache::builder() + .max_capacity(1000) + .time_to_live(Duration::from_secs(60)) + .build(); + Cache::InMemory(cache) + } + + /// A function that initializes both in memory cache and redis client connection for being used + /// for managing hybrid cache which increases resiliancy of the search engine by allowing the + /// cache to switch to `in memory` caching if the `redis` cache server is temporarily + /// unavailable. + /// + /// # Arguments + /// + /// * `redis_cache` - It takes `redis` client connection struct as an argument. + /// + /// # Returns + /// + /// It returns a tuple variant `Hybrid` storing both the in-memory cache type and the `redis` + /// client connection struct. + #[cfg(all(feature = "redis-cache", feature = "memory-cache"))] + pub fn new_hybrid(redis_cache: RedisCache) -> Self { + let cache = MokaCache::builder() + .max_capacity(1000) + .time_to_live(Duration::from_secs(60)) + .build(); + Cache::Hybrid(redis_cache, cache) + } + + /// A function which fetches the cached json results as json string. /// /// # Arguments /// /// * `url` - It takes an url as a string. - pub fn cached_results_json(self, url: String) -> Result> { - let hashed_url_string = self.clone().compute_url_hash(&url); - let mut redis_connection: Connection = - Client::open(self.redis_connection_url)?.get_connection()?; - Ok(redis_connection.get(hashed_url_string)?) + /// + /// # Error + /// + /// Returns the `SearchResults` from the cache if the program executes normally otherwise + /// returns a `CacheError` if the results cannot be retrieved from the cache. 
+ pub async fn cached_json(&mut self, _url: &str) -> Result> { + match self { + Cache::Disabled => Err(Report::new(CacheError::MissingValue)), + #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))] + Cache::Redis(redis_cache) => { + let json = redis_cache.cached_json(_url).await?; + Ok(serde_json::from_str::(&json) + .map_err(|_| CacheError::SerializationError)?) + } + #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) { + Some(res) => Ok(res), + None => Err(Report::new(CacheError::MissingValue)), + }, + #[cfg(all(feature = "redis-cache", feature = "memory-cache"))] + Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await { + Ok(res) => Ok(serde_json::from_str::(&res) + .map_err(|_| CacheError::SerializationError)?), + Err(_) => match in_memory.get(&_url.to_string()) { + Some(res) => Ok(res), + None => Err(Report::new(CacheError::MissingValue)), + }, + }, + } } - /// A function which caches the results by using the hashed `url` as the key and - /// `json results` as the value and stores it in redis server with ttl(time to live) - /// set to 60 seconds. + /// A function which caches the results by using the `url` as the key and + /// `json results` as the value and stores it in the cache /// /// # Arguments /// /// * `json_results` - It takes the json results string as an argument. /// * `url` - It takes the url as a String. 
- pub fn cache_results( - self, - json_results: String, - url: String, - ) -> Result<(), Box> { - let hashed_url_string = self.clone().compute_url_hash(&url); - let mut redis_connection: Connection = - Client::open(self.redis_connection_url)?.get_connection()?; - - // put results_json into cache - redis_connection.set(hashed_url_string.clone(), json_results)?; - - // Set the TTL for the key to 60 seconds - redis_connection - .expire::(hashed_url_string.clone(), 60) - .unwrap(); - - Ok(()) + /// + /// # Error + /// + /// Returns a unit type if the program caches the given search results without a failure + /// otherwise it returns a `CacheError` if the search results cannot be cached due to a + /// failure. + pub async fn cache_results( + &mut self, + _search_results: &SearchResults, + _url: &str, + ) -> Result<(), Report> { + match self { + Cache::Disabled => Ok(()), + #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))] + Cache::Redis(redis_cache) => { + let json = serde_json::to_string(_search_results) + .map_err(|_| CacheError::SerializationError)?; + redis_cache.cache_results(&json, _url).await + } + #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + Cache::InMemory(cache) => { + cache.insert(_url.to_string(), _search_results.clone()); + Ok(()) + } + #[cfg(all(feature = "memory-cache", feature = "redis-cache"))] + Cache::Hybrid(redis_cache, cache) => { + let json = serde_json::to_string(_search_results) + .map_err(|_| CacheError::SerializationError)?; + match redis_cache.cache_results(&json, _url).await { + Ok(_) => Ok(()), + Err(_) => { + cache.insert(_url.to_string(), _search_results.clone()); + Ok(()) + } + } + } + } + } +} + +/// A structure to efficiently share the cache between threads - as it is protected by a Mutex. 
+pub struct SharedCache { + /// The internal cache protected from concurrent access by a mutex + cache: Mutex, +} + +impl SharedCache { + /// A function that creates a new `SharedCache` from a Cache implementation. + /// + /// # Arguments + /// + /// * `cache` - It takes the `Cache` enum variant as an argument with the prefered cache type. + /// + /// Returns a newly constructed `SharedCache` struct. + pub fn new(cache: Cache) -> Self { + Self { + cache: Mutex::new(cache), + } + } + + /// A getter function which retrieves the cached SearchResulsts from the internal cache. + /// + /// # Arguments + /// + /// * `url` - It takes the search url as an argument which will be used as the key to fetch the + /// cached results from the cache. + /// + /// # Error + /// + /// Returns a `SearchResults` struct containing the search results from the cache if nothing + /// goes wrong otherwise returns a `CacheError`. + pub async fn cached_json(&self, url: &str) -> Result> { + let mut mut_cache = self.cache.lock().await; + mut_cache.cached_json(url).await + } + + /// A setter function which caches the results by using the `url` as the key and + /// `SearchResults` as the value. + /// + /// # Arguments + /// + /// * `search_results` - It takes the `SearchResults` as an argument which are results that + /// needs to be cached. + /// * `url` - It takes the search url as an argument which will be used as the key for storing + /// results in the cache. + /// + /// # Error + /// + /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError` + /// on a failure. + pub async fn cache_results( + &self, + search_results: &SearchResults, + url: &str, + ) -> Result<(), Report> { + let mut mut_cache = self.cache.lock().await; + mut_cache.cache_results(search_results, url).await } } diff --git a/src/cache/error.rs b/src/cache/error.rs new file mode 100644 index 0000000..62c9098 --- /dev/null +++ b/src/cache/error.rs @@ -0,0 +1,50 @@ +//! 
This module provides the error enum to handle different errors associated while requesting data from +//! the redis server using an async connection pool. +use std::fmt; + +#[cfg(feature = "redis-cache")] +use redis::RedisError; + +/// A custom error type used for handling redis async pool associated errors. +#[derive(Debug)] +pub enum CacheError { + /// This variant handles all errors related to `RedisError`, + #[cfg(feature = "redis-cache")] + RedisError(RedisError), + /// This variant handles the errors which occurs when all the connections + /// in the connection pool return a connection dropped redis error. + PoolExhaustionWithConnectionDropError, + /// Whenever serialization or deserialization fails during communication with the cache. + SerializationError, + /// Returned when the value is missing. + MissingValue, +} + +impl fmt::Display for CacheError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + #[cfg(feature = "redis-cache")] + CacheError::RedisError(redis_error) => { + if let Some(detail) = redis_error.detail() { + write!(f, "{}", detail) + } else { + write!(f, "") + } + } + CacheError::PoolExhaustionWithConnectionDropError => { + write!( + f, + "Error all connections from the pool dropped with connection error" + ) + } + CacheError::MissingValue => { + write!(f, "The value is missing from the cache") + } + CacheError::SerializationError => { + write!(f, "Unable to serialize, deserialize from the cache") + } + } + } +} + +impl error_stack::Context for CacheError {} diff --git a/src/cache/mod.rs b/src/cache/mod.rs index 91a91ca..887f119 100644 --- a/src/cache/mod.rs +++ b/src/cache/mod.rs @@ -1 +1,7 @@ -pub mod cacher; +//! This module provides the modules which provide the functionality to cache the aggregated +//! results fetched and aggregated from the upstream search engines in a json format. 
+ +pub mod cacher; +pub mod error; +#[cfg(feature = "redis-cache")] +pub mod redis_cacher; diff --git a/src/cache/redis_cacher.rs b/src/cache/redis_cacher.rs new file mode 100644 index 0000000..e90344f --- /dev/null +++ b/src/cache/redis_cacher.rs @@ -0,0 +1,167 @@ +//! This module provides the functionality to cache the aggregated results fetched and aggregated +//! from the upstream search engines in a json format. + +use error_stack::Report; +use futures::future::try_join_all; +use md5::compute; +use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError}; + +use super::error::CacheError; + +/// A named struct which stores the redis Connection url address to which the client will +/// connect to. +#[derive(Clone)] +pub struct RedisCache { + /// It stores a pool of connections ready to be used. + connection_pool: Vec, + /// It stores the size of the connection pool (in other words the number of + /// connections that should be stored in the pool). + pool_size: u8, + /// It stores the index of which connection is being used at the moment. + current_connection: u8, +} + +impl RedisCache { + /// A function which fetches the cached json results as json string. + /// + /// # Arguments + /// + /// * `redis_connection_url` - It takes the redis Connection url address. + /// * `pool_size` - It takes the size of the connection pool (in other words the number of + /// connections that should be stored in the pool). + /// + /// # Error + /// + /// Returns a newly constructed `RedisCache` struct on success otherwise returns a standard + /// error type. 
+ pub async fn new( + redis_connection_url: &str, + pool_size: u8, + ) -> Result> { + let client = Client::open(redis_connection_url)?; + let mut tasks: Vec<_> = Vec::new(); + + for _ in 0..pool_size { + tasks.push(client.get_tokio_connection_manager()); + } + + let redis_cache = RedisCache { + connection_pool: try_join_all(tasks).await?, + pool_size, + current_connection: Default::default(), + }; + Ok(redis_cache) + } + + /// A helper function which computes the hash of the url and formats and returns it as string. + /// + /// # Arguments + /// + /// * `url` - It takes an url as string. + fn hash_url(&self, url: &str) -> String { + format!("{:?}", compute(url)) + } + + /// A function which fetches the cached json results as json string from the redis server. + /// + /// # Arguments + /// + /// * `url` - It takes an url as a string. + /// + /// # Error + /// + /// Returns the results as a String from the cache on success otherwise returns a `CacheError` + /// on a failure. + pub async fn cached_json(&mut self, url: &str) -> Result> { + self.current_connection = Default::default(); + let hashed_url_string: &str = &self.hash_url(url); + + let mut result: Result = self.connection_pool + [self.current_connection as usize] + .get(hashed_url_string) + .await; + + // Code to check whether the current connection being used is dropped with connection error + // or not. if it drops with the connection error then the current connection is replaced + // with a new connection from the pool which is then used to run the redis command then + // that connection is also checked whether it is dropped or not if it is not then the + // result is passed as a `Result` or else the same process repeats again and if all of the + // connections in the pool result in connection drop error then a custom pool error is + // returned. 
+ loop { + match result { + Err(error) => match error.is_connection_dropped() { + true => { + self.current_connection += 1; + if self.current_connection == self.pool_size { + return Err(Report::new( + CacheError::PoolExhaustionWithConnectionDropError, + )); + } + result = self.connection_pool[self.current_connection as usize] + .get(hashed_url_string) + .await; + continue; + } + false => return Err(Report::new(CacheError::RedisError(error))), + }, + Ok(res) => return Ok(res), + } + } + } + + /// A function which caches the results by using the hashed `url` as the key and + /// `json results` as the value and stores it in redis server with ttl(time to live) + /// set to 60 seconds. + /// + /// # Arguments + /// + /// * `json_results` - It takes the json results string as an argument. + /// * `url` - It takes the url as a String. + /// + /// # Error + /// + /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError` + /// on a failure. + pub async fn cache_results( + &mut self, + json_results: &str, + url: &str, + ) -> Result<(), Report> { + self.current_connection = Default::default(); + let hashed_url_string: &str = &self.hash_url(url); + + let mut result: Result<(), RedisError> = self.connection_pool + [self.current_connection as usize] + .set_ex(hashed_url_string, json_results, 60) + .await; + + // Code to check whether the current connection being used is dropped with connection error + // or not. if it drops with the connection error then the current connection is replaced + // with a new connection from the pool which is then used to run the redis command then + // that connection is also checked whether it is dropped or not if it is not then the + // result is passed as a `Result` or else the same process repeats again and if all of the + // connections in the pool result in connection drop error then a custom pool error is + // returned. 
+ loop { + match result { + Err(error) => match error.is_connection_dropped() { + true => { + self.current_connection += 1; + if self.current_connection == self.pool_size { + return Err(Report::new( + CacheError::PoolExhaustionWithConnectionDropError, + )); + } + result = self.connection_pool[self.current_connection as usize] + .set_ex(hashed_url_string, json_results, 60) + .await; + continue; + } + false => return Err(Report::new(CacheError::RedisError(error))), + }, + Ok(_) => return Ok(()), + } + } + } +} diff --git a/src/config/mod.rs b/src/config/mod.rs new file mode 100644 index 0000000..babc54f --- /dev/null +++ b/src/config/mod.rs @@ -0,0 +1,4 @@ +//! This module provides the modules which handles the functionality to parse the lua config +//! and convert the config options into rust readable form. + +pub mod parser; diff --git a/src/config/parser.rs b/src/config/parser.rs new file mode 100644 index 0000000..fb9f8b1 --- /dev/null +++ b/src/config/parser.rs @@ -0,0 +1,152 @@ +//! This module provides the functionality to parse the lua config and convert the config options +//! into rust readable form. + +use crate::handler::paths::{file_path, FileType}; + +use crate::models::parser_models::{AggregatorConfig, RateLimiter, Style}; +use log::LevelFilter; +use mlua::Lua; +use std::{collections::HashMap, fs, thread::available_parallelism}; + +/// A named struct which stores the parsed config file options. +#[derive(Clone)] +pub struct Config { + /// It stores the parsed port number option on which the server should launch. + pub port: u16, + /// It stores the parsed ip address option on which the server should launch + pub binding_ip: String, + /// It stores the theming options for the website. + pub style: Style, + #[cfg(feature = "redis-cache")] + /// It stores the redis connection url address on which the redis + /// client should connect. + pub redis_url: String, + /// It stores the option to whether enable or disable production use. 
+ pub aggregator: AggregatorConfig, + /// It stores the option to whether enable or disable logs. + pub logging: bool, + /// It stores the option to whether enable or disable debug mode. + pub debug: bool, + /// It stores all the engine names that were enabled by the user. + pub upstream_search_engines: Vec, + /// It stores the time (secs) which controls the server request timeout. + pub request_timeout: u8, + /// It stores the number of threads which controls the app will use to run. + pub threads: u8, + /// It stores configuration options for the ratelimiting middleware. + pub rate_limiter: RateLimiter, + /// It stores the level of safe search to be used for restricting content in the + /// search results. + pub safe_search: u8, +} + +impl Config { + /// A function which parses the config.lua file and puts all the parsed options in the newly + /// constructed Config struct and returns it. + /// + /// # Arguments + /// + /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get + /// initialized twice. Pass false if the logger has not yet been initialized. + /// + /// # Error + /// + /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error + /// or io error if the config.lua file doesn't exists otherwise it returns a newly constructed + /// Config struct with all the parsed config options from the parsed config file. + pub fn parse(logging_initialized: bool) -> Result> { + let lua = Lua::new(); + let globals = lua.globals(); + + lua.load(&fs::read_to_string(file_path(FileType::Config)?)?) 
+ .exec()?; + + let parsed_threads: u8 = globals.get::<_, u8>("threads")?; + + let debug: bool = globals.get::<_, bool>("debug")?; + let logging: bool = globals.get::<_, bool>("logging")?; + + if !logging_initialized { + set_logging_level(debug, logging); + } + + let threads: u8 = if parsed_threads == 0 { + let total_num_of_threads: usize = available_parallelism()?.get() / 2; + log::error!( + "Config Error: The value of `threads` option should be a non zero positive integer" + ); + log::error!("Falling back to using {} threads", total_num_of_threads); + total_num_of_threads as u8 + } else { + parsed_threads + }; + + let rate_limiter = globals.get::<_, HashMap>("rate_limiter")?; + + let parsed_safe_search: u8 = globals.get::<_, u8>("safe_search")?; + let safe_search: u8 = match parsed_safe_search { + 0..=4 => parsed_safe_search, + _ => { + log::error!("Config Error: The value of `safe_search` option should be a non zero positive integer from 0 to 4."); + log::error!("Falling back to using the value `1` for the option"); + 1 + } + }; + + Ok(Config { + port: globals.get::<_, u16>("port")?, + binding_ip: globals.get::<_, String>("binding_ip")?, + style: Style::new( + globals.get::<_, String>("theme")?, + globals.get::<_, String>("colorscheme")?, + ), + #[cfg(feature = "redis-cache")] + redis_url: globals.get::<_, String>("redis_url")?, + aggregator: AggregatorConfig { + random_delay: globals.get::<_, bool>("production_use")?, + }, + logging, + debug, + upstream_search_engines: globals + .get::<_, HashMap>("upstream_search_engines")? 
+ .into_iter() + .filter_map(|(key, value)| value.then_some(key)) + .filter_map(|engine| crate::models::engine_models::EngineHandler::new(&engine)) + .collect(), + request_timeout: globals.get::<_, u8>("request_timeout")?, + threads, + rate_limiter: RateLimiter { + number_of_requests: rate_limiter["number_of_requests"], + time_limit: rate_limiter["time_limit"], + }, + safe_search, + }) + } +} + +/// a helper function that sets the proper logging level +/// +/// # Arguments +/// +/// * `debug` - It takes the option to whether enable or disable debug mode. +/// * `logging` - It takes the option to whether enable or disable logs. +fn set_logging_level(debug: bool, logging: bool) { + if let Ok(pkg_env_var) = std::env::var("PKG_ENV") { + if pkg_env_var.to_lowercase() == "dev" { + env_logger::Builder::new() + .filter(None, LevelFilter::Trace) + .init(); + return; + } + } + + // Initializing logging middleware with level set to default or info. + let log_level = match (debug, logging) { + (true, true) => LevelFilter::Debug, + (true, false) => LevelFilter::Debug, + (false, true) => LevelFilter::Info, + (false, false) => LevelFilter::Error, + }; + + env_logger::Builder::new().filter(None, log_level).init(); +} diff --git a/src/config_parser/mod.rs b/src/config_parser/mod.rs deleted file mode 100644 index 11ce559..0000000 --- a/src/config_parser/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod parser; -pub mod parser_models; diff --git a/src/config_parser/parser.rs b/src/config_parser/parser.rs deleted file mode 100644 index 4625bd8..0000000 --- a/src/config_parser/parser.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! This module provides the functionality to parse the lua config and convert the config options -//! into rust readable form. - -use super::parser_models::Style; -use rlua::Lua; -use std::fs; - -/// A named struct which stores the parsed config file options. -/// -/// # Fields -// -/// * `port` - It stores the parsed port number option on which the server should launch. 
-/// * `binding_ip_addr` - It stores the parsed ip address option on which the server should launch -/// * `style` - It stores the theming options for the website. -/// * `redis_connection_url` - It stores the redis connection url address on which the redis -/// client should connect. -#[derive(Clone)] -pub struct Config { - pub port: u16, - pub binding_ip_addr: String, - pub style: Style, - pub redis_connection_url: String, -} - -impl Config { - /// A function which parses the config.lua file and puts all the parsed options in the newly - /// contructed Config struct and returns it. - /// - /// # Error - /// - /// Returns a lua parse error if parsing of the config.lua file fails or has a syntax error - /// or io error if the config.lua file doesn't exists otherwise it returns a newly contructed - /// Config struct with all the parsed config options from the parsed config file. - pub fn parse() -> Result> { - let lua = Lua::new(); - - lua.context(|context| { - let globals = context.globals(); - - context - .load(&fs::read_to_string("./websurfx/config.lua")?) 
- .exec()?; - - Ok(Config { - port: globals.get::<_, u16>("port")?, - binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?, - style: Style::new( - globals.get::<_, String>("theme")?, - globals.get::<_, String>("colorscheme")?, - ), - redis_connection_url: globals.get::<_, String>("redis_connection_url")?, - }) - }) - } -} diff --git a/src/engines/duckduckgo.rs b/src/engines/duckduckgo.rs index 254ab16..0f06ea4 100644 --- a/src/engines/duckduckgo.rs +++ b/src/engines/duckduckgo.rs @@ -4,102 +4,113 @@ use std::collections::HashMap; -use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT}; +use reqwest::header::HeaderMap; use scraper::{Html, Selector}; -use crate::search_results_handler::aggregation_models::RawSearchResult; +use crate::models::aggregation_models::SearchResult; -/// This function scrapes results from the upstream engine duckduckgo and puts all the scraped -/// results like title, visiting_url (href in html),engine (from which engine it was fetched from) -/// and description in a RawSearchResult and then adds that to HashMap whose keys are url and -/// values are RawSearchResult struct and then returns it within a Result enum. -/// -/// # Arguments -/// -/// * `query` - Takes the user provided query to query to the upstream search engine with. -/// * `page` - Takes an u32 as an argument. -/// * `user_agent` - Takes a random user agent string as an argument. -/// -/// # Errors -/// -/// Returns a reqwest error if the user is not connected to the internet or if their is failure to -/// reach the above `upstream search engine` page and also returns error if the scraping -/// selector fails to initialize" -pub async fn results( - query: &str, - page: u32, - user_agent: &str, -) -> Result, Box> { - // Page number can be missing or empty string and so appropriate handling is required - // so that upstream server recieves valid page number. 
- let url: String = match page { - 1 => { - format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js") - } - _ => { - format!( - "https://duckduckgo.com/html/?q={}&s={}&dc={}&v=1&o=json&api=/d.js", - query, - (page / 2 + (page % 2)) * 30, - (page / 2 + (page % 2)) * 30 + 1 - ) - } - }; +use crate::models::engine_models::{EngineError, SearchEngine}; - // initializing HeaderMap and adding appropriate headers. - let mut header_map = HeaderMap::new(); - header_map.insert(USER_AGENT, user_agent.parse()?); - header_map.insert(REFERER, "https://google.com/".parse()?); - header_map.insert(CONTENT_TYPE, "application/x-www-form-urlencoded".parse()?); - header_map.insert(COOKIE, "kl=wt-wt".parse()?); +use error_stack::{Report, Result, ResultExt}; - // fetch the html from upstream duckduckgo engine - // TODO: Write better error handling code to handle no results case. - let results: String = reqwest::Client::new() - .get(url) - .headers(header_map) // add spoofed headers to emulate human behaviour - .send() - .await? - .text() - .await?; +/// A new DuckDuckGo engine type defined in-order to implement the `SearchEngine` trait which allows to +/// reduce code duplication as well as allows to create vector of different search engines easily. 
+pub struct DuckDuckGo; - let document: Html = Html::parse_document(&results); - let results: Selector = Selector::parse(".result")?; - let result_title: Selector = Selector::parse(".result__a")?; - let result_url: Selector = Selector::parse(".result__url")?; - let result_desc: Selector = Selector::parse(".result__snippet")?; - - // scrape all the results from the html - Ok(document - .select(&results) - .map(|result| { - RawSearchResult::new( - result - .select(&result_title) - .next() - .unwrap() - .inner_html() - .trim() - .to_string(), +#[async_trait::async_trait] +impl SearchEngine for DuckDuckGo { + async fn results( + &self, + query: &str, + page: u32, + user_agent: &str, + request_timeout: u8, + _safe_search: u8, + ) -> Result, EngineError> { + // Page number can be missing or empty string and so appropriate handling is required + // so that upstream server recieves valid page number. + let url: String = match page { + 1 | 0 => { + format!("https://html.duckduckgo.com/html/?q={query}&s=&dc=&v=1&o=json&api=/d.js") + } + _ => { format!( - "https://{}", + "https://duckduckgo.com/html/?q={}&s={}&dc={}&v=1&o=json&api=/d.js", + query, + (page / 2 + (page % 2)) * 30, + (page / 2 + (page % 2)) * 30 + 1 + ) + } + }; + + // initializing HeaderMap and adding appropriate headers. 
+ let header_map = HeaderMap::try_from(&HashMap::from([ + ("USER_AGENT".to_string(), user_agent.to_string()), + ("REFERER".to_string(), "https://google.com/".to_string()), + ( + "CONTENT_TYPE".to_string(), + "application/x-www-form-urlencoded".to_string(), + ), + ("COOKIE".to_string(), "kl=wt-wt".to_string()), + ])) + .change_context(EngineError::UnexpectedError)?; + + let document: Html = Html::parse_document( + &DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?, + ); + + let no_result: Selector = Selector::parse(".no-results") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".no-results"))?; + + if document.select(&no_result).next().is_some() { + return Err(Report::new(EngineError::EmptyResultSet)); + } + + let results: Selector = Selector::parse(".result") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?; + let result_title: Selector = Selector::parse(".result__a") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__a"))?; + let result_url: Selector = Selector::parse(".result__url") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__url"))?; + let result_desc: Selector = Selector::parse(".result__snippet") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__snippet"))?; + + // scrape all the results from the html + Ok(document + .select(&results) + .map(|result| { + SearchResult::new( result - .select(&result_url) + .select(&result_title) .next() .unwrap() .inner_html() - .trim() - ), - result - .select(&result_desc) - .next() - .unwrap() - .inner_html() - .trim() - .to_string(), - vec!["duckduckgo".to_string()], - ) - }) - 
.map(|search_result| (search_result.visiting_url.clone(), search_result)) - .collect()) + .trim(), + format!( + "https://{}", + result + .select(&result_url) + .next() + .unwrap() + .inner_html() + .trim() + ) + .as_str(), + result + .select(&result_desc) + .next() + .unwrap() + .inner_html() + .trim(), + &["duckduckgo"], + ) + }) + .map(|search_result| (search_result.url.clone(), search_result)) + .collect()) + } } diff --git a/src/engines/mod.rs b/src/engines/mod.rs index 7f390b1..0016728 100644 --- a/src/engines/mod.rs +++ b/src/engines/mod.rs @@ -1,2 +1,7 @@ +//! This module provides different modules which handles the functionlity to fetch results from the +//! upstream search engines based on user requested queries. Also provides different models to +//! provide a standard functions to be implemented for all the upstream search engine handling +//! code. Moreover, it also provides a custom error for the upstream search engine handling code. + pub mod duckduckgo; pub mod searx; diff --git a/src/engines/searx.rs b/src/engines/searx.rs index 8812dd9..6ab0469 100644 --- a/src/engines/searx.rs +++ b/src/engines/searx.rs @@ -2,90 +2,112 @@ //! by querying the upstream searx search engine instance with user provided query and with a page //! number if provided. 
-use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT}; +use reqwest::header::HeaderMap; use scraper::{Html, Selector}; use std::collections::HashMap; -use crate::search_results_handler::aggregation_models::RawSearchResult; +use crate::models::aggregation_models::SearchResult; +use crate::models::engine_models::{EngineError, SearchEngine}; +use error_stack::{Report, Result, ResultExt}; -/// This function scrapes results from the upstream engine duckduckgo and puts all the scraped -/// results like title, visiting_url (href in html),engine (from which engine it was fetched from) -/// and description in a RawSearchResult and then adds that to HashMap whose keys are url and -/// values are RawSearchResult struct and then returns it within a Result enum. -/// -/// # Arguments -/// -/// * `query` - Takes the user provided query to query to the upstream search engine with. -/// * `page` - Takes an u32 as an argument. -/// * `user_agent` - Takes a random user agent string as an argument. -/// -/// # Errors -/// -/// Returns a reqwest error if the user is not connected to the internet or if their is failure to -/// reach the above `upstream search engine` page and also returns error if the scraping -/// selector fails to initialize" -pub async fn results( - query: &str, - page: u32, - user_agent: &str, -) -> Result, Box> { - // Page number can be missing or empty string and so appropriate handling is required - // so that upstream server recieves valid page number. - let url: String = format!("https://searx.work/search?q={query}&pageno={page}"); +/// A new Searx engine type defined in-order to implement the `SearchEngine` trait which allows to +/// reduce code duplication as well as allows to create vector of different search engines easily. +pub struct Searx; - // initializing headers and adding appropriate headers. 
- let mut header_map = HeaderMap::new(); - header_map.insert(USER_AGENT, user_agent.parse()?); - header_map.insert(REFERER, "https://google.com/".parse()?); - header_map.insert(CONTENT_TYPE, "application/x-www-form-urlencoded".parse()?); - header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse()?); +#[async_trait::async_trait] +impl SearchEngine for Searx { + async fn results( + &self, + query: &str, + page: u32, + user_agent: &str, + request_timeout: u8, + mut safe_search: u8, + ) -> Result, EngineError> { + // Page number can be missing or empty string and so appropriate handling is required + // so that upstream server recieves valid page number. + if safe_search == 3 { + safe_search = 2; + }; - // fetch the html from upstream searx instance engine - // TODO: Write better error handling code to handle no results case. - let results: String = reqwest::Client::new() - .get(url) - .headers(header_map) // add spoofed headers to emulate human behaviours. 
- .send() - .await? - .text() - .await?; + let url: String = match page { + 0 | 1 => { + format!("https://searx.work/search?q={query}&pageno=1&safesearch={safe_search}") + } + _ => format!( + "https://searx.work/search?q={query}&pageno={page}&safesearch={safe_search}" + ), + }; - let document: Html = Html::parse_document(&results); - let results: Selector = Selector::parse(".result")?; - let result_title: Selector = Selector::parse("h3>a")?; - let result_url: Selector = Selector::parse("h3>a")?; - let result_desc: Selector = Selector::parse(".content")?; + // initializing headers and adding appropriate headers. + let header_map = HeaderMap::try_from(&HashMap::from([ + ("USER_AGENT".to_string(), user_agent.to_string()), + ("REFERER".to_string(), "https://google.com/".to_string()), + ("CONTENT_TYPE".to_string(), "application/x-www-form-urlencoded".to_string()), + ("COOKIE".to_string(), "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".to_string()) + ])) + 
.change_context(EngineError::UnexpectedError)?; - // scrape all the results from the html - Ok(document - .select(&results) - .map(|result| { - RawSearchResult::new( - result - .select(&result_title) - .next() - .unwrap() - .inner_html() - .trim() - .to_string(), - result - .select(&result_url) - .next() - .unwrap() - .value() - .attr("href") - .unwrap() - .to_string(), - result - .select(&result_desc) - .next() - .unwrap() - .inner_html() - .trim() - .to_string(), - vec!["searx".to_string()], - ) - }) - .map(|search_result| (search_result.visiting_url.clone(), search_result)) - .collect()) + let document: Html = Html::parse_document( + &Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?, + ); + + let no_result: Selector = Selector::parse("#urls>.dialog-error>p") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| { + format!("invalid CSS selector: {}", "#urls>.dialog-error>p") + })?; + + if let Some(no_result_msg) = document.select(&no_result).nth(1) { + if no_result_msg.inner_html() + == "we didn't find any results. 
Please use another query or search in more categories" + { + return Err(Report::new(EngineError::EmptyResultSet)); + } + } + + let results: Selector = Selector::parse(".result") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?; + let result_title: Selector = Selector::parse("h3>a") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?; + let result_url: Selector = Selector::parse("h3>a") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?; + + let result_desc: Selector = Selector::parse(".content") + .map_err(|_| Report::new(EngineError::UnexpectedError)) + .attach_printable_lazy(|| format!("invalid CSS selector: {}", ".content"))?; + + // scrape all the results from the html + Ok(document + .select(&results) + .map(|result| { + SearchResult::new( + result + .select(&result_title) + .next() + .unwrap() + .inner_html() + .trim(), + result + .select(&result_url) + .next() + .unwrap() + .value() + .attr("href") + .unwrap(), + result + .select(&result_desc) + .next() + .unwrap() + .inner_html() + .trim(), + &["searx"], + ) + }) + .map(|search_result| (search_result.url.clone(), search_result)) + .collect()) + } } diff --git a/src/handler/mod.rs b/src/handler/mod.rs new file mode 100644 index 0000000..188767d --- /dev/null +++ b/src/handler/mod.rs @@ -0,0 +1,5 @@ +//! This module provides modules which provide the functionality to handle paths for different +//! files present on different paths and provide one appropriate path on which it is present and +//! can be used. + +pub mod paths; diff --git a/src/handler/paths.rs b/src/handler/paths.rs new file mode 100644 index 0000000..9ea5fff --- /dev/null +++ b/src/handler/paths.rs @@ -0,0 +1,119 @@ +//! 
This module provides the functionality to handle theme folder present on different paths and +//! provide one appropriate path on which it is present and can be used. + +use std::collections::HashMap; +use std::io::Error; +use std::path::Path; +use std::sync::OnceLock; + +// ------- Constants -------- +/// The constant holding the name of the theme folder. +const PUBLIC_DIRECTORY_NAME: &str = "public"; +/// The constant holding the name of the common folder. +const COMMON_DIRECTORY_NAME: &str = "websurfx"; +/// The constant holding the name of the config file. +const CONFIG_FILE_NAME: &str = "config.lua"; +/// The constant holding the name of the AllowList text file. +const ALLOWLIST_FILE_NAME: &str = "allowlist.txt"; +/// The constant holding the name of the BlockList text file. +const BLOCKLIST_FILE_NAME: &str = "blocklist.txt"; + +/// An enum type which provides different variants to handle paths for various files/folders. +#[derive(Hash, PartialEq, Eq, Debug)] +pub enum FileType { + /// This variant handles all the paths associated with the config file. + Config, + /// This variant handles all the paths associated with the Allowlist text file. + AllowList, + /// This variant handles all the paths associated with the BlockList text file. + BlockList, + /// This variant handles all the paths associated with the public folder (Theme folder). + Theme, +} + +/// A static variable which stores the different filesystem paths for various file/folder types. +static FILE_PATHS_FOR_DIFF_FILE_TYPES: OnceLock>> = OnceLock::new(); + +/// A function which returns an appropriate path for thr provided file type by checking if the path +/// for the given file type exists on that path. +/// +/// # Error +/// +/// Returns a ` folder/file not found!!` error if the give file_type folder/file is not +/// present on the path on which it is being tested. 
+/// +/// # Example +/// +/// If this function is give the file_type of Theme variant then the theme folder is checked by the +/// following steps: +/// +/// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2) +/// 2. Under project folder ( or codebase in other words) if it is not present +/// here then it returns an error as mentioned above. +pub fn file_path(file_type: FileType) -> Result<&'static str, Error> { + let file_path: &Vec = FILE_PATHS_FOR_DIFF_FILE_TYPES + .get_or_init(|| { + HashMap::from([ + ( + FileType::Config, + vec![ + format!( + "{}/.config/{}/{}", + std::env::var("HOME").unwrap(), + COMMON_DIRECTORY_NAME, + CONFIG_FILE_NAME + ), + format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME), + format!("./{}/{}", COMMON_DIRECTORY_NAME, CONFIG_FILE_NAME), + ], + ), + ( + FileType::Theme, + vec![ + format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME), + format!("./{}/", PUBLIC_DIRECTORY_NAME), + ], + ), + ( + FileType::AllowList, + vec![ + format!( + "{}/.config/{}/{}", + std::env::var("HOME").unwrap(), + COMMON_DIRECTORY_NAME, + ALLOWLIST_FILE_NAME + ), + format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME), + format!("./{}/{}", COMMON_DIRECTORY_NAME, ALLOWLIST_FILE_NAME), + ], + ), + ( + FileType::BlockList, + vec![ + format!( + "{}/.config/{}/{}", + std::env::var("HOME").unwrap(), + COMMON_DIRECTORY_NAME, + BLOCKLIST_FILE_NAME + ), + format!("/etc/xdg/{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME), + format!("./{}/{}", COMMON_DIRECTORY_NAME, BLOCKLIST_FILE_NAME), + ], + ), + ]) + }) + .get(&file_type) + .unwrap(); + + for (idx, _) in file_path.iter().enumerate() { + if Path::new(file_path[idx].as_str()).exists() { + return Ok(std::mem::take(&mut &*file_path[idx])); + } + } + + // if no of the configs above exist, return error + Err(Error::new( + std::io::ErrorKind::NotFound, + format!("{:?} file/folder not found!!", file_type), + )) +} diff --git a/src/lib.rs b/src/lib.rs index 
c234658..73e9364 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,20 +1,30 @@ //! This main library module provides the functionality to provide and handle the Tcp server //! and register all the routes for the `websurfx` meta search engine website. +#![forbid(unsafe_code, clippy::panic)] +#![deny(missing_docs, clippy::missing_docs_in_private_items, clippy::perf)] +#![warn(clippy::cognitive_complexity, rust_2018_idioms)] + pub mod cache; -pub mod config_parser; +pub mod config; pub mod engines; -pub mod search_results_handler; +pub mod handler; +pub mod models; +pub mod results; pub mod server; use std::net::TcpListener; -use crate::server::routes; +use crate::server::router; +use actix_cors::Cors; use actix_files as fs; -use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer}; -use config_parser::parser::Config; +use actix_governor::{Governor, GovernorConfigBuilder}; +use actix_web::{dev::Server, http::header, middleware::Logger, web, App, HttpServer}; +use cache::cacher::{Cache, SharedCache}; +use config::parser::Config; use handlebars::Handlebars; +use handler::paths::{file_path, FileType}; /// Runs the web server on the provided TCP listener and returns a `Server` instance. 
/// @@ -30,36 +40,69 @@ use handlebars::Handlebars; /// /// ```rust /// use std::net::TcpListener; -/// use websurfx::{config_parser::parser::Config, run}; +/// use websurfx::{config::parser::Config, run, cache::cacher::Cache}; /// -/// let config = Config::parse().unwrap(); +/// let config = Config::parse(true).unwrap(); /// let listener = TcpListener::bind("127.0.0.1:8080").expect("Failed to bind address"); -/// let server = run(listener,config).expect("Failed to start server"); +/// let cache = Cache::new_in_memory(); +/// let server = run(listener,config,cache).expect("Failed to start server"); /// ``` -pub fn run(listener: TcpListener, config: Config) -> std::io::Result { - let mut handlebars: Handlebars = Handlebars::new(); +pub fn run(listener: TcpListener, config: Config, cache: Cache) -> std::io::Result { + let mut handlebars: Handlebars<'_> = Handlebars::new(); + + let public_folder_path: &str = file_path(FileType::Theme)?; handlebars - .register_templates_directory(".html", "./public/templates") + .register_templates_directory(".html", format!("{}/templates", public_folder_path)) .unwrap(); - let handlebars_ref: web::Data = web::Data::new(handlebars); + let handlebars_ref: web::Data> = web::Data::new(handlebars); + + let cloned_config_threads_opt: u8 = config.threads; + + let cache = web::Data::new(SharedCache::new(cache)); let server = HttpServer::new(move || { + let cors: Cors = Cors::default() + .allow_any_origin() + .allowed_methods(vec!["GET"]) + .allowed_headers(vec![ + header::ORIGIN, + header::CONTENT_TYPE, + header::REFERER, + header::COOKIE, + ]); + App::new() + .wrap(Logger::default()) // added logging middleware for logging. .app_data(handlebars_ref.clone()) .app_data(web::Data::new(config.clone())) - .wrap(Logger::default()) // added logging middleware for logging. 
+ .app_data(cache.clone()) + .wrap(cors) + .wrap(Governor::new( + &GovernorConfigBuilder::default() + .per_second(config.rate_limiter.time_limit as u64) + .burst_size(config.rate_limiter.number_of_requests as u32) + .finish() + .unwrap(), + )) // Serve images and static files (css and js files). - .service(fs::Files::new("/static", "./public/static").show_files_listing()) - .service(fs::Files::new("/images", "./public/images").show_files_listing()) - .service(routes::robots_data) // robots.txt - .service(routes::index) // index page - .service(routes::search) // search page - .service(routes::about) // about page - .service(routes::settings) // settings page - .default_service(web::route().to(routes::not_found)) // error page + .service( + fs::Files::new("/static", format!("{}/static", public_folder_path)) + .show_files_listing(), + ) + .service( + fs::Files::new("/images", format!("{}/images", public_folder_path)) + .show_files_listing(), + ) + .service(router::robots_data) // robots.txt + .service(router::index) // index page + .service(server::routes::search::search) // search page + .service(router::about) // about page + .service(router::settings) // settings page + .default_service(web::route().to(router::not_found)) // error page }) + .workers(cloned_config_threads_opt as usize) // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080. .listen(listener)? .run(); diff --git a/src/models/aggregation_models.rs b/src/models/aggregation_models.rs new file mode 100644 index 0000000..72bbf08 --- /dev/null +++ b/src/models/aggregation_models.rs @@ -0,0 +1,203 @@ +//! This module provides public models for handling, storing and serializing of search results +//! data scraped from the upstream search engines. 
+ +use serde::{Deserialize, Serialize}; +use smallvec::SmallVec; + +use super::{engine_models::EngineError, parser_models::Style}; + +/// A named struct to store the raw scraped search results scraped search results from the +/// upstream search engines before aggregating it.It derives the Clone trait which is needed +/// to write idiomatic rust using `Iterators`. +/// (href url in html in simple words). +#[derive(Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SearchResult { + /// The title of the search result. + pub title: String, + /// The url which is accessed when clicked on it + pub url: String, + /// The description of the search result. + pub description: String, + /// The names of the upstream engines from which this results were provided. + pub engine: SmallVec<[String; 0]>, +} + +impl SearchResult { + /// Constructs a new `RawSearchResult` with the given arguments needed for the struct. + /// + /// # Arguments + /// + /// * `title` - The title of the search result. + /// * `url` - The url which is accessed when clicked on it + /// (href url in html in simple words). + /// * `description` - The description of the search result. + /// * `engine` - The names of the upstream engines from which this results were provided. + pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self { + SearchResult { + title: title.to_owned(), + url: url.to_owned(), + description: description.to_owned(), + engine: engine.iter().map(|name| name.to_string()).collect(), + } + } + + /// A function which adds the engine name provided as a string into a vector of strings. + /// + /// # Arguments + /// + /// * `engine` - Takes an engine name provided as a String. + pub fn add_engines(&mut self, engine: &str) { + self.engine.push(engine.to_owned()) + } + + /// A function which returns the engine name stored from the struct as a string. + /// + /// # Returns + /// + /// An engine name stored as a string from the struct. 
+ pub fn engine(&mut self) -> String { + std::mem::take(&mut self.engine[0]) + } +} + +/// A named struct that stores the error info related to the upstream search engines. +#[derive(Serialize, Deserialize, Clone)] +pub struct EngineErrorInfo { + /// It stores the error type which occured while fetching the result from a particular search + /// engine. + pub error: String, + /// It stores the name of the engine that failed to provide the requested search results. + pub engine: String, + /// It stores the name of the color to indicate whether how severe the particular error is (In + /// other words it indicates the severity of the error/issue). + pub severity_color: String, +} + +impl EngineErrorInfo { + /// Constructs a new `SearchResult` with the given arguments needed for the struct. + /// + /// # Arguments + /// + /// * `error` - It takes the error type which occured while fetching the result from a particular + /// search engine. + /// * `engine` - It takes the name of the engine that failed to provide the requested search results. + pub fn new(error: &EngineError, engine: &str) -> Self { + Self { + error: match error { + EngineError::RequestError => "RequestError".to_owned(), + EngineError::EmptyResultSet => "EmptyResultSet".to_owned(), + EngineError::UnexpectedError => "UnexpectedError".to_owned(), + }, + engine: engine.to_owned(), + severity_color: match error { + EngineError::RequestError => "green".to_owned(), + EngineError::EmptyResultSet => "blue".to_owned(), + EngineError::UnexpectedError => "red".to_owned(), + }, + } + } +} + +/// A named struct to store, serialize, deserialize the all the search results scraped and +/// aggregated from the upstream search engines. +/// `SearchResult` structs. 
+#[derive(Serialize, Deserialize, Default, Clone)] +#[serde(rename_all = "camelCase")] +pub struct SearchResults { + /// Stores the individual serializable `SearchResult` struct into a vector of + pub results: Vec, + /// Stores the current pages search query `q` provided in the search url. + pub page_query: String, + /// Stores the theming options for the website. + pub style: Style, + /// Stores the information on which engines failed with their engine name + /// and the type of error that caused it. + pub engine_errors_info: Vec, + /// Stores the flag option which holds the check value that the following + /// search query was disallowed when the safe search level set to 4 and it + /// was present in the `Blocklist` file. + pub disallowed: bool, + /// Stores the flag option which holds the check value that the following + /// search query was filtered when the safe search level set to 3 and it + /// was present in the `Blocklist` file. + pub filtered: bool, + /// Stores the safe search level `safesearch` provided in the search url. + pub safe_search_level: u8, + /// Stores the flag option which holds the check value that whether any search engines were + /// selected or not. + pub no_engines_selected: bool, +} + +impl SearchResults { + /// Constructs a new `SearchResult` with the given arguments needed for the struct. + /// + /// # Arguments + /// + /// * `results` - Takes an argument of individual serializable `SearchResult` struct + /// and stores it into a vector of `SearchResult` structs. + /// * `page_query` - Takes an argument of current page`s search query `q` provided in + /// the search url. + /// * `engine_errors_info` - Takes an array of structs which contains information regarding + /// which engines failed with their names, reason and their severity color name. 
+ pub fn new( + results: Vec, + page_query: &str, + engine_errors_info: &[EngineErrorInfo], + ) -> Self { + Self { + results, + page_query: page_query.to_owned(), + style: Style::default(), + engine_errors_info: engine_errors_info.to_owned(), + disallowed: Default::default(), + filtered: Default::default(), + safe_search_level: Default::default(), + no_engines_selected: Default::default(), + } + } + + /// A setter function to add website style to the return search results. + pub fn add_style(&mut self, style: &Style) { + self.style = style.clone(); + } + + /// A setter function that sets disallowed to true. + pub fn set_disallowed(&mut self) { + self.disallowed = true; + } + + /// A setter function to set the current page search query. + pub fn set_page_query(&mut self, page: &str) { + self.page_query = page.to_owned(); + } + + /// A setter function that sets the filtered to true. + pub fn set_filtered(&mut self) { + self.filtered = true; + } + + /// A getter function that gets the value of `engine_errors_info`. + pub fn engine_errors_info(&mut self) -> Vec { + std::mem::take(&mut self.engine_errors_info) + } + /// A getter function that gets the value of `results`. + pub fn results(&mut self) -> Vec { + self.results.clone() + } + + /// A setter function to set the current page safe search level. + pub fn set_safe_search_level(&mut self, safe_search_level: u8) { + self.safe_search_level = safe_search_level; + } + + /// A getter function that gets the value of `no_engines_selected`. + pub fn no_engines_selected(&self) -> bool { + self.no_engines_selected + } + + /// A setter function to set the `no_engines_selected` to true. + pub fn set_no_engines_selected(&mut self) { + self.no_engines_selected = true; + } +} diff --git a/src/models/engine_models.rs b/src/models/engine_models.rs new file mode 100644 index 0000000..d4a4e72 --- /dev/null +++ b/src/models/engine_models.rs @@ -0,0 +1,159 @@ +//! 
This module provides the error enum to handle different errors associated while requesting data from +//! the upstream search engines with the search query provided by the user. + +use super::aggregation_models::SearchResult; +use error_stack::{Result, ResultExt}; +use std::{collections::HashMap, fmt, time::Duration}; + +/// A custom error type used for handle engine associated errors. +#[derive(Debug)] +pub enum EngineError { + /// This variant handles all request related errors like forbidden, not found, + /// etc. + EmptyResultSet, + /// This variant handles the not results found error provide by the upstream + /// search engines. + RequestError, + /// This variant handles all the errors which are unexpected or occur rarely + /// and are errors mostly related to failure in initialization of HeaderMap, + /// Selector errors and all other errors occurring within the code handling + /// the `upstream search engines`. + UnexpectedError, +} + +impl fmt::Display for EngineError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + EngineError::EmptyResultSet => { + write!(f, "The upstream search engine returned an empty result set") + } + EngineError::RequestError => { + write!( + f, + "Error occurred while requesting data from upstream search engine" + ) + } + EngineError::UnexpectedError => { + write!(f, "An unexpected error occurred while processing the data") + } + } + } +} + +impl error_stack::Context for EngineError {} + +/// A trait to define common behavior for all search engines. +#[async_trait::async_trait] +pub trait SearchEngine: Sync + Send { + /// This helper function fetches/requests the search results from the upstream search engine in + /// an html form. + /// + /// # Arguments + /// + /// * `url` - It takes the url of the upstream search engine with the user requested search + /// query appended in the search parameters. 
+ /// * `header_map` - It takes the http request headers to be sent to the upstream engine in + /// order to prevent being detected as a bot. It takes the header as a HeaderMap type. + /// * `request_timeout` - It takes the request timeout value as seconds which is used to limit + /// the amount of time for each request to remain connected when until the results can be provided + /// by the upstream engine. + /// + /// # Error + /// + /// It returns the html data as a string if the upstream engine provides the data as expected + /// otherwise it returns a custom `EngineError`. + async fn fetch_html_from_upstream( + &self, + url: &str, + header_map: reqwest::header::HeaderMap, + request_timeout: u8, + ) -> Result { + // fetch the html from upstream search engine + Ok(reqwest::Client::new() + .get(url) + .timeout(Duration::from_secs(request_timeout as u64)) // Add timeout to request to avoid DDOSing the server + .headers(header_map) // add spoofed headers to emulate human behavior + .send() + .await + .change_context(EngineError::RequestError)? + .text() + .await + .change_context(EngineError::RequestError)?) + } + + /// This function scrapes results from the upstream engine and puts all the scraped results like + /// title, visiting_url (href in html),engine (from which engine it was fetched from) and description + /// in a RawSearchResult and then adds that to HashMap whose keys are url and values are RawSearchResult + /// struct and then returns it within a Result enum. + /// + /// # Arguments + /// + /// * `query` - Takes the user provided query to query to the upstream search engine with. + /// * `page` - Takes an u32 as an argument. + /// * `user_agent` - Takes a random user agent string as an argument. + /// * `request_timeout` - Takes a time (secs) as a value which controls the server request timeout. 
+ /// + /// # Errors + /// + /// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to + /// reach the above `upstream search engine` page or if the `upstream search engine` is unable to + /// provide results for the requested search query and also returns error if the scraping selector + /// or HeaderMap fails to initialize. + async fn results( + &self, + query: &str, + page: u32, + user_agent: &str, + request_timeout: u8, + safe_search: u8, + ) -> Result, EngineError>; +} + +/// A named struct which stores the engine struct with the name of the associated engine. +pub struct EngineHandler { + /// It stores the engine struct wrapped in a box smart pointer as the engine struct implements + /// the `SearchEngine` trait. + engine: Box, + /// It stores the name of the engine to which the struct is associated to. + name: &'static str, +} + +impl Clone for EngineHandler { + fn clone(&self) -> Self { + Self::new(self.name).unwrap() + } +} + +impl EngineHandler { + /// Parses an engine name into an engine handler. + /// + /// # Arguments + /// + /// * `engine_name` - It takes the name of the engine to which the struct was associated to. + /// + /// # Returns + /// + /// It returns an option either containing the value or a none if the engine is unknown + pub fn new(engine_name: &str) -> Option { + let engine: (&'static str, Box) = + match engine_name.to_lowercase().as_str() { + "duckduckgo" => ( + "duckduckgo", + Box::new(crate::engines::duckduckgo::DuckDuckGo), + ), + "searx" => ("searx", Box::new(crate::engines::searx::Searx)), + _ => return None, + }; + + Some(Self { + engine: engine.1, + name: engine.0, + }) + } + + /// This function converts the EngineHandler type into a tuple containing the engine name and + /// the associated engine struct. 
+ pub fn into_name_engine(self) -> (&'static str, Box) { + (self.name, self.engine) + } +} diff --git a/src/models/mod.rs b/src/models/mod.rs new file mode 100644 index 0000000..6a7d235 --- /dev/null +++ b/src/models/mod.rs @@ -0,0 +1,8 @@ +//! This module provides modules which in turn provides various models for aggregrating search +//! results, parsing config file, providing trait to standardize search engine handling code, +//! custom engine error for the search engine, etc. + +pub mod aggregation_models; +pub mod engine_models; +pub mod parser_models; +pub mod server_models; diff --git a/src/config_parser/parser_models.rs b/src/models/parser_models.rs similarity index 63% rename from src/config_parser/parser_models.rs rename to src/models/parser_models.rs index 42baf0d..9dad348 100644 --- a/src/config_parser/parser_models.rs +++ b/src/models/parser_models.rs @@ -1,5 +1,5 @@ //! This module provides public models for handling, storing and serializing parsed config file -//! options from config.lua by grouping them togather. +//! options from config.lua by grouping them together. use serde::{Deserialize, Serialize}; @@ -12,15 +12,12 @@ use serde::{Deserialize, Serialize}; /// order to allow the deserializing the json back to struct in aggregate function in /// aggregator.rs and create a new struct out of it and then serialize it back to json and pass /// it to the template files. -/// -/// # Fields -// -/// * `theme` - It stores the parsed theme option used to set a theme for the website. -/// * `colorscheme` - It stores the parsed colorscheme option used to set a colorscheme for the -/// theme being used. -#[derive(Serialize, Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone, Default)] pub struct Style { + /// It stores the parsed theme option used to set a theme for the website. pub theme: String, + /// It stores the parsed colorscheme option used to set a colorscheme for the + /// theme being used. 
pub colorscheme: String, } @@ -36,3 +33,20 @@ impl Style { Style { theme, colorscheme } } } + +/// Configuration options for the aggregator. +#[derive(Clone)] +pub struct AggregatorConfig { + /// It stores the option to whether enable or disable random delays between + /// requests. + pub random_delay: bool, +} + +/// Configuration options for the rate limiter middleware. +#[derive(Clone)] +pub struct RateLimiter { + /// The number of request that are allowed within a provided time limit. + pub number_of_requests: u8, + /// The time limit in which the quantity of requests that should be accepted. + pub time_limit: u8, +} diff --git a/src/models/server_models.rs b/src/models/server_models.rs new file mode 100644 index 0000000..4772b98 --- /dev/null +++ b/src/models/server_models.rs @@ -0,0 +1,31 @@ +//! This module provides the models to parse cookies and search parameters from the search +//! engine website. +use serde::Deserialize; + +/// A named struct which deserializes all the user provided search parameters and stores them. +#[derive(Deserialize)] +pub struct SearchParams { + /// It stores the search parameter option `q` (or query in simple words) + /// of the search url. + pub q: Option, + /// It stores the search parameter `page` (or pageno in simple words) + /// of the search url. + pub page: Option, + /// It stores the search parameter `safesearch` (or safe search level in simple words) of the + /// search url. + pub safesearch: Option, +} + +/// A named struct which is used to deserialize the cookies fetched from the client side. +#[allow(dead_code)] +#[derive(Deserialize)] +pub struct Cookie<'a> { + /// It stores the theme name used in the website. + pub theme: &'a str, + /// It stores the colorscheme name used for the website theme. + pub colorscheme: &'a str, + /// It stores the user selected upstream search engines selected from the UI. + pub engines: Vec<&'a str>, + /// It stores the user selected safe search level from the UI. 
+ pub safe_search_level: u8, +} diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs new file mode 100644 index 0000000..8c9be2c --- /dev/null +++ b/src/results/aggregator.rs @@ -0,0 +1,359 @@ +//! This module provides the functionality to scrape and gathers all the results from the upstream +//! search engines and then removes duplicate results. + +use super::user_agent::random_user_agent; +use crate::handler::paths::{file_path, FileType}; +use crate::models::{ + aggregation_models::{EngineErrorInfo, SearchResult, SearchResults}, + engine_models::{EngineError, EngineHandler}, +}; +use error_stack::Report; +use rand::Rng; +use regex::Regex; +use std::{ + collections::HashMap, + io::{BufReader, Read}, + time::Duration, +}; +use std::{fs::File, io::BufRead}; +use tokio::task::JoinHandle; + +/// Aliases for long type annotations +type FutureVec = Vec, Report>>>; + +/// The function aggregates the scraped results from the user-selected upstream search engines. +/// These engines can be chosen either from the user interface (UI) or from the configuration file. +/// The code handles this process by matching the selected search engines and adding them to a vector. +/// This vector is then used to create an asynchronous task vector using `tokio::spawn`, which returns +/// a future. This future is awaited in another loop. Once the results are collected, they are filtered +/// to remove any errors and ensure only proper results are included. If an error is encountered, it is +/// sent to the UI along with the name of the engine and the type of error. This information is finally +/// placed in the returned `SearchResults` struct. +/// +/// Additionally, the function eliminates duplicate results. If two results are identified as coming from +/// multiple engines, their names are combined to indicate that the results were fetched from these upstream +/// engines. 
After this, all the data in the `HashMap` is removed and placed into a struct that contains all +/// the aggregated results in a vector. Furthermore, the query used is also added to the struct. This step is +/// necessary to ensure that the search bar in the search remains populated even when searched from the query URL. +/// +/// Overall, this function serves to aggregate scraped results from user-selected search engines, handling errors, +/// removing duplicates, and organizing the data for display in the UI. +/// +/// # Example: +/// +/// If you search from the url like `https://127.0.0.1/search?q=huston` then the search bar should +/// contain the word huston and not remain empty. +/// +/// # Arguments +/// +/// * `query` - Accepts a string to query with the above upstream search engines. +/// * `page` - Accepts an u32 page number. +/// * `random_delay` - Accepts a boolean value to add a random delay before making the request. +/// * `debug` - Accepts a boolean value to enable or disable debug mode option. +/// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the +/// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout. +/// user through the UI or the config file. +/// +/// # Error +/// +/// Returns an error a reqwest and scraping selector errors if any error occurs in the results +/// function in either `searx` or `duckduckgo` or both otherwise returns a `SearchResults struct` +/// containing appropriate values. +pub async fn aggregate( + query: &str, + page: u32, + random_delay: bool, + debug: bool, + upstream_search_engines: &[EngineHandler], + request_timeout: u8, + safe_search: u8, +) -> Result> { + let user_agent: &str = random_user_agent(); + + // Add a random delay before making the request. 
+ if random_delay || !debug { + let mut rng = rand::thread_rng(); + let delay_secs = rng.gen_range(1..10); + tokio::time::sleep(Duration::from_secs(delay_secs)).await; + } + + let mut names: Vec<&str> = Vec::with_capacity(0); + + // create tasks for upstream result fetching + let mut tasks: FutureVec = FutureVec::new(); + + for engine_handler in upstream_search_engines { + let (name, search_engine) = engine_handler.to_owned().into_name_engine(); + names.push(name); + let query: String = query.to_owned(); + tasks.push(tokio::spawn(async move { + search_engine + .results( + &query, + page, + user_agent.clone(), + request_timeout, + safe_search, + ) + .await + })); + } + + // get upstream responses + let mut responses = Vec::with_capacity(tasks.len()); + + for task in tasks { + if let Ok(result) = task.await { + responses.push(result) + } + } + + // aggregate search results, removing duplicates and handling errors the upstream engines returned + let mut result_map: HashMap = HashMap::new(); + let mut engine_errors_info: Vec = Vec::new(); + + let mut handle_error = |error: &Report, engine_name: &'static str| { + log::error!("Engine Error: {:?}", error); + engine_errors_info.push(EngineErrorInfo::new( + error.downcast_ref::().unwrap(), + engine_name, + )); + }; + + for _ in 0..responses.len() { + let response = responses.pop().unwrap(); + let engine = names.pop().unwrap(); + + if result_map.is_empty() { + match response { + Ok(results) => { + result_map = results.clone(); + } + Err(error) => { + handle_error(&error, engine); + } + } + continue; + } + + match response { + Ok(result) => { + result.into_iter().for_each(|(key, value)| { + result_map + .entry(key) + .and_modify(|result| { + result.add_engines(engine); + }) + .or_insert_with(|| -> SearchResult { value }); + }); + } + Err(error) => { + handle_error(&error, engine); + } + } + } + + if safe_search >= 3 { + let mut blacklist_map: HashMap = HashMap::new(); + filter_with_lists( + &mut result_map, + &mut 
blacklist_map, + file_path(FileType::BlockList)?, + )?; + + filter_with_lists( + &mut blacklist_map, + &mut result_map, + file_path(FileType::AllowList)?, + )?; + + drop(blacklist_map); + } + + let results: Vec = result_map.into_values().collect(); + + Ok(SearchResults::new(results, query, &engine_errors_info)) +} + +/// Filters a map of search results using a list of regex patterns. +/// +/// # Arguments +/// +/// * `map_to_be_filtered` - A mutable reference to a `HashMap` of search results to filter, where the filtered results will be removed from. +/// * `resultant_map` - A mutable reference to a `HashMap` to hold the filtered results. +/// * `file_path` - A `&str` representing the path to a file containing regex patterns to use for filtering. +/// +/// # Errors +/// +/// Returns an error if the file at `file_path` cannot be opened or read, or if a regex pattern is invalid. +pub fn filter_with_lists( + map_to_be_filtered: &mut HashMap, + resultant_map: &mut HashMap, + file_path: &str, +) -> Result<(), Box> { + let mut reader = BufReader::new(File::open(file_path)?); + + for line in reader.by_ref().lines() { + let re = Regex::new(line?.trim())?; + + // Iterate over each search result in the map and check if it matches the regex pattern + for (url, search_result) in map_to_be_filtered.clone().into_iter() { + if re.is_match(&url.to_lowercase()) + || re.is_match(&search_result.title.to_lowercase()) + || re.is_match(&search_result.description.to_lowercase()) + { + // If the search result matches the regex pattern, move it from the original map to the resultant map + resultant_map.insert( + url.to_owned(), + map_to_be_filtered.remove(&url.to_owned()).unwrap(), + ); + } + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use smallvec::smallvec; + use std::collections::HashMap; + use std::io::Write; + use tempfile::NamedTempFile; + + #[test] + fn test_filter_with_lists() -> Result<(), Box> { + // Create a map of search results to filter + let mut 
map_to_be_filtered = HashMap::new(); + map_to_be_filtered.insert( + "https://www.example.com".to_owned(), + SearchResult { + title: "Example Domain".to_owned(), + url: "https://www.example.com".to_owned(), + description: "This domain is for use in illustrative examples in documents." + .to_owned(), + engine: smallvec!["Google".to_owned(), "Bing".to_owned()], + }, + ); + map_to_be_filtered.insert( + "https://www.rust-lang.org/".to_owned(), + SearchResult { + title: "Rust Programming Language".to_owned(), + url: "https://www.rust-lang.org/".to_owned(), + description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(), + engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()], + }, + ); + + // Create a temporary file with regex patterns + let mut file = NamedTempFile::new()?; + writeln!(file, "example")?; + writeln!(file, "rust")?; + file.flush()?; + + let mut resultant_map = HashMap::new(); + filter_with_lists( + &mut map_to_be_filtered, + &mut resultant_map, + file.path().to_str().unwrap(), + )?; + + assert_eq!(resultant_map.len(), 2); + assert!(resultant_map.contains_key("https://www.example.com")); + assert!(resultant_map.contains_key("https://www.rust-lang.org/")); + assert_eq!(map_to_be_filtered.len(), 0); + + Ok(()) + } + + #[test] + fn test_filter_with_lists_wildcard() -> Result<(), Box> { + let mut map_to_be_filtered = HashMap::new(); + map_to_be_filtered.insert( + "https://www.example.com".to_owned(), + SearchResult { + title: "Example Domain".to_owned(), + url: "https://www.example.com".to_owned(), + description: "This domain is for use in illustrative examples in documents." 
+ .to_owned(), + engine: smallvec!["Google".to_owned(), "Bing".to_owned()], + }, + ); + map_to_be_filtered.insert( + "https://www.rust-lang.org/".to_owned(), + SearchResult { + title: "Rust Programming Language".to_owned(), + url: "https://www.rust-lang.org/".to_owned(), + description: "A systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_owned(), + engine: smallvec!["Google".to_owned(), "DuckDuckGo".to_owned()], + }, + ); + + // Create a temporary file with a regex pattern containing a wildcard + let mut file = NamedTempFile::new()?; + writeln!(file, "ex.*le")?; + file.flush()?; + + let mut resultant_map = HashMap::new(); + + filter_with_lists( + &mut map_to_be_filtered, + &mut resultant_map, + file.path().to_str().unwrap(), + )?; + + assert_eq!(resultant_map.len(), 1); + assert!(resultant_map.contains_key("https://www.example.com")); + assert_eq!(map_to_be_filtered.len(), 1); + assert!(map_to_be_filtered.contains_key("https://www.rust-lang.org/")); + + Ok(()) + } + + #[test] + fn test_filter_with_lists_file_not_found() { + let mut map_to_be_filtered = HashMap::new(); + + let mut resultant_map = HashMap::new(); + + // Call the `filter_with_lists` function with a non-existent file path + let result = filter_with_lists( + &mut map_to_be_filtered, + &mut resultant_map, + "non-existent-file.txt", + ); + + assert!(result.is_err()); + } + + #[test] + fn test_filter_with_lists_invalid_regex() { + let mut map_to_be_filtered = HashMap::new(); + map_to_be_filtered.insert( + "https://www.example.com".to_owned(), + SearchResult { + title: "Example Domain".to_owned(), + url: "https://www.example.com".to_owned(), + description: "This domain is for use in illustrative examples in documents." 
+ .to_owned(), + engine: smallvec!["Google".to_owned(), "Bing".to_owned()], + }, + ); + + let mut resultant_map = HashMap::new(); + + // Create a temporary file with an invalid regex pattern + let mut file = NamedTempFile::new().unwrap(); + writeln!(file, "example(").unwrap(); + file.flush().unwrap(); + + let result = filter_with_lists( + &mut map_to_be_filtered, + &mut resultant_map, + file.path().to_str().unwrap(), + ); + + assert!(result.is_err()); + } +} diff --git a/src/results/mod.rs b/src/results/mod.rs new file mode 100644 index 0000000..9ec3229 --- /dev/null +++ b/src/results/mod.rs @@ -0,0 +1,6 @@ +//! This module provides modules that handle the functionality to aggregate the fetched search +//! results from the upstream search engines and filters it if safe search is set to 3 or 4. Also, +//! provides various models to aggregate search results into a standardized form. + +pub mod aggregator; +pub mod user_agent; diff --git a/src/results/user_agent.rs b/src/results/user_agent.rs new file mode 100644 index 0000000..ab2811b --- /dev/null +++ b/src/results/user_agent.rs @@ -0,0 +1,34 @@ +//! This module provides the functionality to generate random user agent string. + +use std::sync::OnceLock; + +use fake_useragent::{Browsers, UserAgents, UserAgentsBuilder}; + +/// A static variable which stores the initially build `UserAgents` struct. So as it can be resused +/// again and again without the need of reinitializing the `UserAgents` struct. +static USER_AGENTS: OnceLock = OnceLock::new(); + +/// A function to generate random user agent to improve privacy of the user. +/// +/// # Returns +/// +/// A randomly generated user agent string. 
+pub fn random_user_agent() -> &'static str { + USER_AGENTS + .get_or_init(|| { + UserAgentsBuilder::new() + .cache(false) + .dir("/tmp") + .thread(1) + .set_browsers( + Browsers::new() + .set_chrome() + .set_safari() + .set_edge() + .set_firefox() + .set_mozilla(), + ) + .build() + }) + .random() +} diff --git a/src/search_results_handler/aggregation_models.rs b/src/search_results_handler/aggregation_models.rs deleted file mode 100644 index 4fe670e..0000000 --- a/src/search_results_handler/aggregation_models.rs +++ /dev/null @@ -1,155 +0,0 @@ -//! This module provides public models for handling, storing and serializing of search results -//! data scraped from the upstream search engines. - -use serde::{Deserialize, Serialize}; - -use crate::config_parser::parser_models::Style; - -/// A named struct to store, serialize and deserializes the individual search result from all the -/// scraped and aggregated search results from the upstream search engines. -/// -/// # Fields -/// -/// * `title` - The title of the search result. -/// * `visiting_url` - The url which is accessed when clicked on it (href url in html in simple -/// words). -/// * `url` - The url to be displayed below the search result title in html. -/// * `description` - The description of the search result. -/// * `engine` - The names of the upstream engines from which this results were provided. -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SearchResult { - pub title: String, - pub visiting_url: String, - pub url: String, - pub description: String, - pub engine: Vec, -} - -impl SearchResult { - /// Constructs a new `SearchResult` with the given arguments needed for the struct. - /// - /// # Arguments - /// - /// * `title` - The title of the search result. - /// * `visiting_url` - The url which is accessed when clicked on it - /// (href url in html in simple words). - /// * `url` - The url to be displayed below the search result title in html. 
- /// * `description` - The description of the search result. - /// * `engine` - The names of the upstream engines from which this results were provided. - pub fn new( - title: String, - visiting_url: String, - url: String, - description: String, - engine: Vec, - ) -> Self { - SearchResult { - title, - visiting_url, - url, - description, - engine, - } - } -} - -/// A named struct to store the raw scraped search results scraped search results from the -/// upstream search engines before aggregating it.It derives the Clone trait which is needed -/// to write idiomatic rust using `Iterators`. -/// -/// # Fields -/// -/// * `title` - The title of the search result. -/// * `visiting_url` - The url which is accessed when clicked on it -/// (href url in html in simple words). -/// * `description` - The description of the search result. -/// * `engine` - The names of the upstream engines from which this results were provided. -#[derive(Clone)] -pub struct RawSearchResult { - pub title: String, - pub visiting_url: String, - pub description: String, - pub engine: Vec, -} - -impl RawSearchResult { - /// Constructs a new `RawSearchResult` with the given arguments needed for the struct. - /// - /// # Arguments - /// - /// * `title` - The title of the search result. - /// * `visiting_url` - The url which is accessed when clicked on it - /// (href url in html in simple words). - /// * `description` - The description of the search result. - /// * `engine` - The names of the upstream engines from which this results were provided. - pub fn new( - title: String, - visiting_url: String, - description: String, - engine: Vec, - ) -> Self { - RawSearchResult { - title, - visiting_url, - description, - engine, - } - } - - /// A function which adds the engine name provided as a string into a vector of strings. - /// - /// # Arguments - /// - /// * `engine` - Takes an engine name provided as a String. 
- pub fn add_engines(&mut self, engine: String) { - self.engine.push(engine) - } - - /// A function which returns the engine name stored from the struct as a string. - /// - /// # Returns - /// - /// An engine name stored as a string from the struct. - pub fn engine(self) -> String { - self.engine.get(0).unwrap().to_string() - } -} - -/// A named struct to store, serialize, deserialize the all the search results scraped and -/// aggregated from the upstream search engines. -/// -/// # Fields -/// -/// * `results` - Stores the individual serializable `SearchResult` struct into a vector of -/// `SearchResult` structs. -/// * `page_query` - Stores the current pages search query `q` provided in the search url. -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SearchResults { - pub results: Vec, - pub page_query: String, - pub style: Style, -} - -impl SearchResults { - /// Constructs a new `SearchResult` with the given arguments needed for the struct. - /// - /// # Arguments - /// - /// * `results` - Takes an argument of individual serializable `SearchResult` struct - /// and stores it into a vector of `SearchResult` structs. - /// * `page_query` - Takes an argument of current page`s search query `q` provided in - /// the search url. - pub fn new(results: Vec, page_query: String) -> Self { - SearchResults { - results, - page_query, - style: Style::new("".to_string(), "".to_string()), - } - } - - pub fn add_style(&mut self, style: Style) { - self.style = style; - } -} diff --git a/src/search_results_handler/aggregator.rs b/src/search_results_handler/aggregator.rs deleted file mode 100644 index 5133094..0000000 --- a/src/search_results_handler/aggregator.rs +++ /dev/null @@ -1,92 +0,0 @@ -//! This module provides the functionality to scrape and gathers all the results from the upstream -//! search engines and then removes duplicate results. 
- -use std::{collections::HashMap, time::Duration}; - -use rand::Rng; -use tokio::join; - -use super::{ - aggregation_models::{RawSearchResult, SearchResult, SearchResults}, - user_agent::random_user_agent, -}; - -use crate::engines::{duckduckgo, searx}; - -/// A function that aggregates all the scraped results from the above upstream engines and -/// then removes duplicate results and if two results are found to be from two or more engines -/// then puts their names together to show the results are fetched from these upstream engines -/// and then removes all data from the HashMap and puts into a struct of all results aggregated -/// into a vector and also adds the query used into the struct this is neccessory because -/// otherwise the search bar in search remains empty if searched from the query url -/// -/// # Example: -/// -/// If you search from the url like `https://127.0.0.1/search?q=huston` then the search bar should -/// contain the word huston and not remain empty. -/// -/// # Arguments -/// -/// * `query` - Accepts a string to query with the above upstream search engines. -/// * `page` - Accepts an u32 page number. -/// -/// # Error -/// -/// Returns an error a reqwest and scraping selector errors if any error occurs in the results -/// function in either `searx` or `duckduckgo` or both otherwise returns a `SearchResults struct` -/// containing appropriate values. -pub async fn aggregate( - query: &str, - page: u32, -) -> Result> { - let user_agent: String = random_user_agent(); - let mut result_map: HashMap = HashMap::new(); - - // Add a random delay before making the request. - let mut rng = rand::thread_rng(); - let delay_secs = rng.gen_range(1..10); - std::thread::sleep(Duration::from_secs(delay_secs)); - - // fetch results from upstream search engines simultaneously/concurrently. 
- let (ddg_map_results, searx_map_results) = join!( - duckduckgo::results(query, page, &user_agent), - searx::results(query, page, &user_agent) - ); - - let ddg_map_results: HashMap = ddg_map_results?; - let searx_map_results: HashMap = searx_map_results?; - - result_map.extend(ddg_map_results); - - searx_map_results.into_iter().for_each(|(key, value)| { - result_map - .entry(key) - .and_modify(|result| { - result.add_engines(value.clone().engine()); - }) - .or_insert_with(|| -> RawSearchResult { - RawSearchResult::new( - value.title.clone(), - value.visiting_url.clone(), - value.description.clone(), - value.engine.clone(), - ) - }); - }); - - Ok(SearchResults::new( - result_map - .into_iter() - .map(|(key, value)| { - SearchResult::new( - value.title, - value.visiting_url, - key, - value.description, - value.engine, - ) - }) - .collect(), - query.to_string(), - )) -} diff --git a/src/search_results_handler/mod.rs b/src/search_results_handler/mod.rs deleted file mode 100644 index 0c13442..0000000 --- a/src/search_results_handler/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod aggregation_models; -pub mod aggregator; -pub mod user_agent; diff --git a/src/search_results_handler/user_agent.rs b/src/search_results_handler/user_agent.rs deleted file mode 100644 index 09dd684..0000000 --- a/src/search_results_handler/user_agent.rs +++ /dev/null @@ -1,26 +0,0 @@ -//! This module provides the functionality to generate random user agent string. - -use fake_useragent::{Browsers, UserAgentsBuilder}; - -/// A function to generate random user agent to improve privacy of the user. -/// -/// # Returns -/// -/// A randomly generated user agent string. 
-pub fn random_user_agent() -> String { - UserAgentsBuilder::new() - .cache(false) - .dir("/tmp") - .thread(1) - .set_browsers( - Browsers::new() - .set_chrome() - .set_safari() - .set_edge() - .set_firefox() - .set_mozilla(), - ) - .build() - .random() - .to_string() -} diff --git a/src/server/mod.rs b/src/server/mod.rs index 6a664ab..7f4274f 100644 --- a/src/server/mod.rs +++ b/src/server/mod.rs @@ -1 +1,7 @@ +//! This module provides modules that handle the functionality of handling different routes/paths +//! for the `websurfx` search engine website. Also it handles the parsing of search parameters in +//! the search route. Also, caches the next, current and previous search results in the search +//! routes with the help of the redis server. + +pub mod router; pub mod routes; diff --git a/src/server/router.rs b/src/server/router.rs new file mode 100644 index 0000000..69a3ede --- /dev/null +++ b/src/server/router.rs @@ -0,0 +1,64 @@ +//! This module provides the functionality to handle different routes of the `websurfx` +//! meta search engine website and provide appropriate response to each route/page +//! when requested. + +use crate::{ + config::parser::Config, + handler::paths::{file_path, FileType}, +}; +use actix_web::{get, web, HttpRequest, HttpResponse}; +use handlebars::Handlebars; +use std::fs::read_to_string; + +/// Handles the route of index page or main page of the `websurfx` meta search engine website. +#[get("/")] +pub async fn index( + hbs: web::Data>, + config: web::Data, +) -> Result> { + let page_content: String = hbs.render("index", &config.style).unwrap(); + Ok(HttpResponse::Ok().body(page_content)) +} + +/// Handles the route of any other accessed route/page which is not provided by the +/// website essentially the 404 error page. 
+pub async fn not_found( + hbs: web::Data>, + config: web::Data, +) -> Result> { + let page_content: String = hbs.render("404", &config.style)?; + + Ok(HttpResponse::Ok() + .content_type("text/html; charset=utf-8") + .body(page_content)) +} + +/// Handles the route of robots.txt page of the `websurfx` meta search engine website. +#[get("/robots.txt")] +pub async fn robots_data(_req: HttpRequest) -> Result> { + let page_content: String = + read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?; + Ok(HttpResponse::Ok() + .content_type("text/plain; charset=ascii") + .body(page_content)) +} + +/// Handles the route of about page of the `websurfx` meta search engine website. +#[get("/about")] +pub async fn about( + hbs: web::Data>, + config: web::Data, +) -> Result> { + let page_content: String = hbs.render("about", &config.style)?; + Ok(HttpResponse::Ok().body(page_content)) +} + +/// Handles the route of settings page of the `websurfx` meta search engine website. +#[get("/settings")] +pub async fn settings( + hbs: web::Data>, + config: web::Data, +) -> Result> { + let page_content: String = hbs.render("settings", &config.style)?; + Ok(HttpResponse::Ok().body(page_content)) +} diff --git a/src/server/routes.rs b/src/server/routes.rs deleted file mode 100644 index 1ee9f35..0000000 --- a/src/server/routes.rs +++ /dev/null @@ -1,176 +0,0 @@ -//! This module provides the functionality to handle different routes of the `websurfx` -//! meta search engine website and provide approriate response to each route/page -//! when requested. - -use std::fs::read_to_string; - -use crate::{ - cache::cacher::RedisCache, - config_parser::parser::Config, - search_results_handler::{aggregation_models::SearchResults, aggregator::aggregate}, -}; -use actix_web::{get, web, HttpRequest, HttpResponse}; -use handlebars::Handlebars; -use serde::Deserialize; - -/// A named struct which deserializes all the user provided search parameters and stores them. 
-/// -/// # Fields -/// -/// * `q` - It stores the search parameter option `q` (or query in simple words) -/// of the search url. -/// * `page` - It stores the search parameter `page` (or pageno in simple words) -/// of the search url. -#[derive(Debug, Deserialize)] -struct SearchParams { - q: Option, - page: Option, -} - -/// Handles the route of index page or main page of the `websurfx` meta search engine website. -#[get("/")] -pub async fn index( - hbs: web::Data>, - config: web::Data, -) -> Result> { - let page_content: String = hbs.render("index", &config.style).unwrap(); - Ok(HttpResponse::Ok().body(page_content)) -} - -/// Handles the route of any other accessed route/page which is not provided by the -/// website essentially the 404 error page. -pub async fn not_found( - hbs: web::Data>, - config: web::Data, -) -> Result> { - let page_content: String = hbs.render("404", &config.style)?; - - Ok(HttpResponse::Ok() - .content_type("text/html; charset=utf-8") - .body(page_content)) -} - -/// Handles the route of search page of the `websurfx` meta search engine website and it takes -/// two search url parameters `q` and `page` where `page` parameter is optional. 
-/// -/// # Example -/// -/// ```bash -/// curl "http://127.0.0.1:8080/search?q=sweden&page=1" -/// ``` -/// -/// Or -/// -/// ```bash -/// curl "http://127.0.0.1:8080/search?q=sweden" -/// ``` -#[get("/search")] -pub async fn search( - hbs: web::Data>, - req: HttpRequest, - config: web::Data, -) -> Result> { - let params = web::Query::::from_query(req.query_string())?; - - //Initialize redis cache connection struct - let redis_cache = RedisCache::new(config.redis_connection_url.clone()); - match ¶ms.q { - Some(query) => { - if query.trim().is_empty() { - Ok(HttpResponse::Found() - .insert_header(("location", "/")) - .finish()) - } else { - // Initialize the page url as an empty string - let mut page_url = String::new(); - - // Find whether the page is valid page number if not then return - // the first page number and also construct the page_url accordingly - let page = match params.page { - Some(page_number) => { - if page_number <= 1 { - page_url = format!( - "http://{}:{}/search?q={}&page={}", - config.binding_ip_addr, config.port, query, 1 - ); - 1 - } else { - page_url = format!( - "http://{}:{}/search?q={}&page={}", - config.binding_ip_addr, config.port, query, page_number - ); - - page_number - } - } - None => { - page_url = format!( - "http://{}:{}{}&page={}", - config.binding_ip_addr, - config.port, - req.uri(), - 1 - ); - - 1 - } - }; - - // fetch the cached results json. - let cached_results_json = redis_cache.clone().cached_results_json(page_url.clone()); - // check if fetched results was indeed fetched or it was an error and if so - // handle the data accordingly. 
- match cached_results_json { - Ok(results_json) => { - let new_results_json: SearchResults = serde_json::from_str(&results_json)?; - let page_content: String = hbs.render("search", &new_results_json)?; - Ok(HttpResponse::Ok().body(page_content)) - } - Err(_) => { - let mut results_json: crate::search_results_handler::aggregation_models::SearchResults = - aggregate(query, page).await?; - results_json.add_style(config.style.clone()); - redis_cache.clone().cache_results( - serde_json::to_string(&results_json)?, - page_url.clone(), - )?; - let page_content: String = hbs.render("search", &results_json)?; - Ok(HttpResponse::Ok().body(page_content)) - } - } - } - } - None => Ok(HttpResponse::Found() - .insert_header(("location", "/")) - .finish()), - } -} - -/// Handles the route of robots.txt page of the `websurfx` meta search engine website. -#[get("/robots.txt")] -pub async fn robots_data(_req: HttpRequest) -> Result> { - let page_content: String = read_to_string("./public/robots.txt")?; - Ok(HttpResponse::Ok() - .content_type("text/plain; charset=ascii") - .body(page_content)) -} - -/// Handles the route of about page of the `websurfx` meta search engine website. -#[get("/about")] -pub async fn about( - hbs: web::Data>, - config: web::Data, -) -> Result> { - let page_content: String = hbs.render("about", &config.style)?; - Ok(HttpResponse::Ok().body(page_content)) -} - -/// Handles the route of settings page of the `websurfx` meta search engine website. -#[get("/settings")] -pub async fn settings( - hbs: web::Data>, - config: web::Data, -) -> Result> { - let page_content: String = hbs.render("settings", &config.style)?; - Ok(HttpResponse::Ok().body(page_content)) -} diff --git a/src/server/routes/mod.rs b/src/server/routes/mod.rs new file mode 100644 index 0000000..6bc5750 --- /dev/null +++ b/src/server/routes/mod.rs @@ -0,0 +1,3 @@ +//! This module provides modules to handle various routes in the search engine website. 
+ +pub mod search; diff --git a/src/server/routes/search.rs b/src/server/routes/search.rs new file mode 100644 index 0000000..80db98f --- /dev/null +++ b/src/server/routes/search.rs @@ -0,0 +1,284 @@ +//! This module handles the search route of the search engine website. + +use crate::{ + cache::cacher::SharedCache, + config::parser::Config, + handler::paths::{file_path, FileType}, + models::{ + aggregation_models::SearchResults, + engine_models::EngineHandler, + server_models::{Cookie, SearchParams}, + }, + results::aggregator::aggregate, +}; +use actix_web::{get, web, HttpRequest, HttpResponse}; +use handlebars::Handlebars; +use regex::Regex; +use std::{ + fs::File, + io::{BufRead, BufReader, Read}, +}; +use tokio::join; + +/// Handles the route of any other accessed route/page which is not provided by the +/// website essentially the 404 error page. +pub async fn not_found( + hbs: web::Data>, + config: web::Data, +) -> Result> { + let page_content: String = hbs.render("404", &config.style)?; + + Ok(HttpResponse::Ok() + .content_type("text/html; charset=utf-8") + .body(page_content)) +} + +/// Handles the route of search page of the `websurfx` meta search engine website and it takes +/// two search url parameters `q` and `page` where `page` parameter is optional. 
+/// +/// # Example +/// +/// ```bash +/// curl "http://127.0.0.1:8080/search?q=sweden&page=1" +/// ``` +/// +/// Or +/// +/// ```bash +/// curl "http://127.0.0.1:8080/search?q=sweden" +/// ``` +#[get("/search")] +pub async fn search( + hbs: web::Data>, + req: HttpRequest, + config: web::Data, + cache: web::Data, +) -> Result> { + let params = web::Query::::from_query(req.query_string())?; + match ¶ms.q { + Some(query) => { + if query.trim().is_empty() { + return Ok(HttpResponse::Found() + .insert_header(("location", "/")) + .finish()); + } + let page = match ¶ms.page { + Some(page) => *page, + None => 1, + }; + + let (_, results, _) = join!( + results( + format!( + "http://{}:{}/search?q={}&page={}&safesearch=", + config.binding_ip, + config.port, + query, + page - 1, + ), + &config, + &cache, + query, + page - 1, + req.clone(), + ¶ms.safesearch + ), + results( + format!( + "http://{}:{}/search?q={}&page={}&safesearch=", + config.binding_ip, config.port, query, page + ), + &config, + &cache, + query, + page, + req.clone(), + ¶ms.safesearch + ), + results( + format!( + "http://{}:{}/search?q={}&page={}&safesearch=", + config.binding_ip, + config.port, + query, + page + 1, + ), + &config, + &cache, + query, + page + 1, + req.clone(), + ¶ms.safesearch + ) + ); + + let page_content: String = hbs.render("search", &results?)?; + Ok(HttpResponse::Ok().body(page_content)) + } + None => Ok(HttpResponse::Found() + .insert_header(("location", "/")) + .finish()), + } +} + +/// Fetches the results for a query and page. It First checks the redis cache, if that +/// fails it gets proper results by requesting from the upstream search engines. +/// +/// # Arguments +/// +/// * `url` - It takes the url of the current page that requested the search results for a +/// particular search query. +/// * `config` - It takes a parsed config struct. +/// * `query` - It takes the page number as u32 value. +/// * `req` - It takes the `HttpRequest` struct as a value. 
+/// +/// # Error +/// +/// It returns the `SearchResults` struct if the search results could be successfully fetched from +/// the cache or from the upstream search engines otherwise it returns an appropriate error. +async fn results( + url: String, + config: &Config, + cache: &web::Data, + query: &str, + page: u32, + req: HttpRequest, + safe_search: &Option, +) -> Result> { + // fetch the cached results json. + let cached_results = cache.cached_json(&url).await; + // check if fetched cache results was indeed fetched or it was an error and if so + // handle the data accordingly. + match cached_results { + Ok(results) => Ok(results), + Err(_) => { + let mut safe_search_level: u8 = match config.safe_search { + 3..=4 => config.safe_search, + _ => match safe_search { + Some(safesearch) => match safesearch { + 0..=2 => *safesearch, + _ => config.safe_search, + }, + None => config.safe_search, + }, + }; + + if safe_search_level == 4 { + let mut results: SearchResults = SearchResults::default(); + let mut _flag: bool = + is_match_from_filter_list(file_path(FileType::BlockList)?, query)?; + _flag = !is_match_from_filter_list(file_path(FileType::AllowList)?, query)?; + + if _flag { + results.set_disallowed(); + results.add_style(&config.style); + results.set_page_query(query); + cache.cache_results(&results, &url).await?; + results.set_safe_search_level(safe_search_level); + return Ok(results); + } + } + + // check if the cookie value is empty or not if it is empty then use the + // default selected upstream search engines from the config file otherwise + // parse the non-empty cookie and grab the user selected engines from the + // UI and use that. 
+ let mut results: SearchResults = match req.cookie("appCookie") { + Some(cookie_value) => { + let cookie_value: Cookie<'_> = + serde_json::from_str(cookie_value.name_value().1)?; + + let engines: Vec = cookie_value + .engines + .iter() + .filter_map(|name| EngineHandler::new(name)) + .collect(); + + safe_search_level = match config.safe_search { + 3..=4 => config.safe_search, + _ => match safe_search { + Some(safesearch) => match safesearch { + 0..=2 => *safesearch, + _ => config.safe_search, + }, + None => cookie_value.safe_search_level, + }, + }; + + match engines.is_empty() { + false => { + aggregate( + query, + page, + config.aggregator.random_delay, + config.debug, + &engines, + config.request_timeout, + safe_search_level, + ) + .await? + } + true => { + let mut search_results = SearchResults::default(); + search_results.set_no_engines_selected(); + search_results.set_page_query(query); + search_results + } + } + } + None => { + aggregate( + query, + page, + config.aggregator.random_delay, + config.debug, + &config.upstream_search_engines, + config.request_timeout, + safe_search_level, + ) + .await? + } + }; + if results.engine_errors_info().is_empty() + && results.results().is_empty() + && !results.no_engines_selected() + { + results.set_filtered(); + } + results.add_style(&config.style); + cache + .cache_results(&results, &(format!("{url}{safe_search_level}"))) + .await?; + results.set_safe_search_level(safe_search_level); + Ok(results) + } + } +} + +/// A helper function which checks whether the search query contains any keywords which should be +/// disallowed/allowed based on the regex based rules present in the blocklist and allowlist files. +/// +/// # Arguments +/// +/// * `file_path` - It takes the file path of the list as the argument. +/// * `query` - It takes the search query to be checked against the list as an argument. 
+/// +/// # Error +/// +/// Returns a bool indicating whether the results were found in the list or not on success +/// otherwise returns a standard error type on a failure. +fn is_match_from_filter_list( + file_path: &str, + query: &str, +) -> Result> { + let mut flag = false; + let mut reader = BufReader::new(File::open(file_path)?); + for line in reader.by_ref().lines() { + let re = Regex::new(&line?)?; + if re.is_match(query) { + flag = true; + break; + } + } + Ok(flag) +} diff --git a/tests/index.rs b/tests/index.rs index e3059bf..91d0814 100644 --- a/tests/index.rs +++ b/tests/index.rs @@ -1,15 +1,21 @@ use std::net::TcpListener; use handlebars::Handlebars; -use websurfx::{config_parser::parser::Config, run}; +use websurfx::{config::parser::Config, run}; // Starts a new instance of the HTTP server, bound to a random available port fn spawn_app() -> String { // Binding to port 0 will trigger the OS to assign a port for us. let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port"); let port = listener.local_addr().unwrap().port(); - let config = Config::parse().unwrap(); - let server = run(listener, config).expect("Failed to bind address"); + let config = Config::parse(false).unwrap(); + let server = run( + listener, + config, + #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))] + websurfx::cache::cacher::Cache::new_in_memory(), + ) + .expect("Failed to bind address"); tokio::spawn(server); format!("http://127.0.0.1:{}/", port) @@ -36,10 +42,10 @@ async fn test_index() { assert_eq!(res.status(), 200); let handlebars = handlebars(); - let config = Config::parse().unwrap(); + let config = Config::parse(true).unwrap(); let template = handlebars.render("index", &config.style).unwrap(); assert_eq!(res.text().await.unwrap(), template); } -// TODO: Write tests for tesing parameters for search function that if provided with something +// TODO: Write tests for testing parameters for search function that if provided with 
something // other than u32 like alphabets and special characters than it should panic diff --git a/websurfx/allowlist.txt b/websurfx/allowlist.txt new file mode 100644 index 0000000..e69de29 diff --git a/websurfx/blocklist.txt b/websurfx/blocklist.txt new file mode 100644 index 0000000..e69de29 diff --git a/websurfx/config.lua b/websurfx/config.lua index 916a9b3..09b418d 100644 --- a/websurfx/config.lua +++ b/websurfx/config.lua @@ -1,21 +1,55 @@ --- Server -port = "8080" -- port on which server should be launched -binding_ip_addr = "127.0.0.1" --ip address on the which server should be launched. +-- ### General ### +logging = true -- an option to enable or disable logs. +debug = false -- an option to enable or disable debug mode. +threads = 10 -- the amount of threads that the app will use to run (the value should be greater than 0). --- Website +-- ### Server ### +port = "8080" -- port on which server should be launched +binding_ip = "127.0.0.1" --ip address on the which server should be launched. +production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users (more than one)) +-- if production_use is set to true +-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests. +request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds). +rate_limiter = { + number_of_requests = 20, -- The number of request that are allowed within a provided time limit. + time_limit = 3, -- The time limit in which the quantity of requests that should be accepted. +} + +-- ### Search ### +-- Filter results based on different levels. 
The levels provided are: +-- {{ +-- 0 - None +-- 1 - Low +-- 2 - Moderate +-- 3 - High +-- 4 - Aggressive +-- }} +safe_search = 2 + +-- ### Website ### -- The different colorschemes provided are: -- {{ -- catppuccin-mocha +-- dark-chocolate -- dracula +-- gruvbox-dark -- monokai -- nord -- oceanic-next +-- one-dark -- solarized-dark -- solarized-light +-- tokyo-night -- tomorrow-night -- }} colorscheme = "catppuccin-mocha" -- the colorscheme name which should be used for the website theme theme = "simple" -- the theme name which should be used for the website --- Caching -redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on. +-- ### Caching ### +redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on. + +-- ### Search Engines ### +upstream_search_engines = { + DuckDuckGo = true, + Searx = false, +} -- select the upstream search engines from which the results should be fetched.