From 44216e4d4c379df39532424841a842f494eac647 Mon Sep 17 00:00:00 2001
From: neon_arch
Date: Wed, 23 Aug 2023 13:11:09 +0300
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat:=20optimise=20search=20results?=
 =?UTF-8?q?=20filtering=20code=20(#163)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/results/aggregator.rs | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index 8e92b10..23ed091 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -1,7 +1,11 @@
 //! This module provides the functionality to scrape and gathers all the results from the upstream
 //! search engines and then removes duplicate results.
 
-use std::{collections::HashMap, io::BufReader, time::Duration};
+use std::{
+    collections::HashMap,
+    io::{BufReader, Read},
+    time::Duration,
+};
 
 use super::{
     aggregation_models::{EngineErrorInfo, SearchResult, SearchResults},
@@ -176,10 +180,10 @@ fn filter_with_lists(
     resultant_map: &mut HashMap<String, SearchResult>,
     file_path: &str,
 ) -> Result<(), Box<dyn std::error::Error>> {
-    for (url, search_result) in map_to_be_filtered.clone().into_iter() {
-        let reader = BufReader::new(File::open(file_path)?);
-        for line in reader.lines() {
-            let re = Regex::new(&line?)?;
+    let mut reader = BufReader::new(File::open(file_path)?);
+    for line in reader.by_ref().lines() {
+        let re = Regex::new(&line?)?;
+        for (url, search_result) in map_to_be_filtered.clone().into_iter() {
             if re.is_match(&url.to_lowercase())
                 || re.is_match(&search_result.title.to_lowercase())
                 || re.is_match(&search_result.description.to_lowercase())