mirror of https://github.com/neon-mmd/websurfx.git synced 2024-11-21 21:48:21 -05:00

Compare commits


5 Commits

Scott  b1333aba24
  Merge 31ccc4f301 into 669e365913
  2024-01-30 18:16:21 +00:00

Scott  31ccc4f301
  ci: adding in steps to regenerate the Cargo.lock file - fixing commit issue with cargo.toml
  2024-01-30 13:16:04 -05:00

Scott  df6378ba58
  ci: adding in steps to regenerate the Cargo.lock file
  2024-01-30 13:03:07 -05:00

alamin655  4cb656e863
  Merge branch 'rolling' into FEATURE/400_GitHubActions-automate-release-versions-PR
  2024-01-30 19:36:51 +05:30

Jann Marc Villablanca  669e365913
  feat: add new helper function to fetch upstream search engine JSON response (#504)
  Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>
  2024-01-30 13:37:50 +00:00
2 changed files with 43 additions and 3 deletions

Changed file 1 of 2:

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           ref: ${{ github.sha }}
           fetch-depth: 0
@@ -37,12 +37,16 @@ jobs:
           branch: update-from-${{ github.sha }}
       - name: update cargo.toml
         run: |
-          appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | sed 's/[v]//')
+          appversion=$(echo "${{ steps.version-bump.outputs.new_tag }}" | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
           sed -i -e "s/^version = .*/version = \"$appversion\"/" Cargo.toml
+      - run: rustup toolchain install stable --profile minimal
+      - run: rustup update stable && rustup default stable
+      - name: regenerate cargo.lock
+        run: cargo generate-lockfile
       - name: auto commit
         uses: stefanzweifel/git-auto-commit-action@v5
         with:
-          commit_message: "[skip ci] update Cargo.toml to ${{ steps.version-bump.outputs.new_tag }}"
+          commit_message: "[skip ci] updating app version to ${{ steps.version-bump.outputs.new_tag }}"
           branch: update-from-${{ github.sha }}
       # create PR using GitHub CLI
       - name: create PR with update info
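The "update cargo.toml" step above now extracts the version from the tag with grep -oE '[0-9]+\.[0-9]+\.[0-9]+' instead of only stripping a literal "v" with sed. As a rough illustration of what that pattern matches (a standalone sketch, not part of the workflow; the regex crate and the tag strings are assumptions for demonstration), the same extraction expressed in Rust:

// Illustration only: mimics the workflow's `grep -oE '[0-9]+\.[0-9]+\.[0-9]+'`,
// which pulls the bare semver out of a tag name such as "v1.2.3".
use regex::Regex;

fn extract_app_version(tag: &str) -> Option<String> {
    // Same pattern as the CI step; the first match is taken as the version.
    let semver = Regex::new(r"[0-9]+\.[0-9]+\.[0-9]+").ok()?;
    semver.find(tag).map(|m| m.as_str().to_owned())
}

fn main() {
    // "v1.2.3" is a made-up tag: the old `sed 's/[v]//'` approach only removed
    // the "v", while the regex keeps nothing but the version digits.
    assert_eq!(extract_app_version("v1.2.3"), Some("1.2.3".to_owned()));
    assert_eq!(extract_app_version("not-a-tag"), None);
}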

Changed file 2 of 2:

@@ -86,6 +86,42 @@ pub trait SearchEngine: Sync + Send {
        .change_context(EngineError::RequestError)?)
    }
/// This helper function fetches/requests the json search results from the upstream search engine as a vector of bytes.
///
/// # Arguments
///
/// * `url` - It takes the url of the upstream search engine with the user requested search
/// query appended in the search parameters.
/// * `header_map` - It takes the http request headers to be sent to the upstream engine in
/// order to prevent being detected as a bot. It takes the header as a HeaderMap type.
/// * `client` - It takes the HTTP client used to send the request to the upstream engine.
///
/// # Error
///
/// It returns the json data as a vector of bytes if the upstream engine provides the data as expected,
/// otherwise it returns a custom `EngineError`.
async fn fetch_json_as_bytes_from_upstream(
&self,
url: &str,
header_map: reqwest::header::HeaderMap,
client: &Client,
) -> Result<Vec<u8>, EngineError> {
// fetch the json response from upstream search engine
Ok(client
.get(url)
.headers(header_map) // add spoofed headers to emulate human behavior
.send()
.await
.change_context(EngineError::RequestError)?
.bytes()
.await
.change_context(EngineError::RequestError)?
.to_vec())
}
/// This function scrapes results from the upstream engine and puts all the scraped results like
/// title, visiting_url (href in html), engine (from which engine it was fetched from) and description
/// in a RawSearchResult and then adds that to HashMap whose keys are url and values are RawSearchResult
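The new fetch_json_as_bytes_from_upstream helper only returns raw bytes; a caller is expected to deserialize them into whatever shape the upstream engine responds with. The following is a minimal standalone sketch of that flow, not code from the websurfx crate: the struct name and JSON fields are invented for illustration, and plain reqwest/serde error handling stands in for the crate's EngineError and error-stack wrapping.

use reqwest::{header::HeaderMap, Client};
use serde::Deserialize;

// Hypothetical shape of an upstream engine's JSON response; the real field
// names depend on the engine being queried.
#[derive(Deserialize, Debug)]
struct UpstreamItem {
    title: String,
    url: String,
}

async fn fetch_and_parse(
    url: &str,
    header_map: HeaderMap,
    client: &Client,
) -> Result<Vec<UpstreamItem>, Box<dyn std::error::Error>> {
    // Same request pattern as the helper: GET with the spoofed headers, then
    // collect the response body as raw bytes.
    let bytes = client
        .get(url)
        .headers(header_map)
        .send()
        .await?
        .bytes()
        .await?
        .to_vec();

    // Deserialize the bytes into the (assumed) JSON structure.
    Ok(serde_json::from_slice(&bytes)?)
}

Inside the trait itself, the bytes would instead come from self.fetch_json_as_bytes_from_upstream(url, header_map, client).await, with failures reported as EngineError as shown in the diff above.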