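//! This module handles the routes of the search engine website: the `/search` route,
//! the shared result fetching helper and the 404 error page.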
use crate::{
    cache::cacher::SharedCache,
    config::parser::Config,
    handler::paths::{file_path, FileType},
    models::{
        aggregation_models::SearchResults,
        engine_models::EngineHandler,
        server_models::{Cookie, SearchParams},
    },
    results::aggregator::aggregate,
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use regex::Regex;
use std::{
    fs::File,
    io::{BufRead, BufReader, Read},
};
use tokio::join;
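
/// Handles every route/page that is not otherwise provided by the website by rendering
/// the 404 error page.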
pub async fn not_found(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("404", &config.style)?;

    Ok(HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(page_content))
}
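
/// Handles the `/search` route. It reads the URL query parameters `q` (the search
/// query, required), `page` (the page number, optional, defaults to 1) and
/// `safesearch` (optional safe search level). A missing or empty query redirects
/// back to the index page.
///
/// # Example
///
/// Assuming an instance bound to `127.0.0.1:8080` (the actual address and port come
/// from the configuration):
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=rust&page=1"
/// ```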
#[get("/search")]
pub async fn search(
    hbs: web::Data<Handlebars<'_>>,
    req: HttpRequest,
    config: web::Data<Config>,
    cache: web::Data<SharedCache>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let params = web::Query::<SearchParams>::from_query(req.query_string())?;
    match &params.q {
        Some(query) => {
            if query.trim().is_empty() {
                return Ok(HttpResponse::Found()
                    .insert_header(("location", "/"))
                    .finish());
            }
            let page = match &params.page {
                Some(page) => *page,
                None => 1,
            };

            // Fetch the previous and next pages concurrently with the requested one so
            // that their results are cached ahead of time; only the current page is
            // rendered.
            let (_, results, _) = join!(
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch=",
                        config.binding_ip,
                        config.port,
                        query,
                        page - 1,
                    ),
                    &config,
                    &cache,
                    query,
                    page - 1,
                    req.clone(),
                    &params.safesearch
                ),
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch=",
                        config.binding_ip, config.port, query, page
                    ),
                    &config,
                    &cache,
                    query,
                    page,
                    req.clone(),
                    &params.safesearch
                ),
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch=",
                        config.binding_ip,
                        config.port,
                        query,
                        page + 1,
                    ),
                    &config,
                    &cache,
                    query,
                    page + 1,
                    req.clone(),
                    &params.safesearch
                )
            );

            let page_content: String = hbs.render("search", &results?)?;
            Ok(HttpResponse::Ok().body(page_content))
        }
        None => Ok(HttpResponse::Found()
            .insert_header(("location", "/"))
            .finish()),
    }
}
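
/// Fetches the search results for a given query and page. The cache is consulted
/// first using `url` as the key; on a miss the effective safe search level is
/// resolved from the configuration, the `safesearch` URL parameter and the
/// `appCookie` cookie, the filter lists are applied at the strictest level, and
/// otherwise results are aggregated from the selected upstream search engines,
/// cached and returned.
///
/// # Arguments
///
/// * `url` - The request URL, used as the cache key for this query and page.
/// * `config` - The parsed server configuration.
/// * `cache` - The shared results cache.
/// * `query` - The user's search query.
/// * `page` - The page number to fetch results for.
/// * `req` - The incoming `HttpRequest`, used to read the `appCookie` cookie.
/// * `safe_search` - The optional safe search level from the URL parameters.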
async fn results(
    url: String,
    config: &Config,
    cache: &web::Data<SharedCache>,
    query: &str,
    page: u32,
    req: HttpRequest,
    safe_search: &Option<u8>,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
    // Fetch the cached results JSON for this URL, if any.
    let cached_results = cache.cached_json(&url).await;
    // Serve the cached results when the lookup succeeded; otherwise build them fresh.
    match cached_results {
        Ok(results) => Ok(results),
        Err(_) => {
            // Resolve the effective safe search level: levels 3 and 4 in the config are
            // always enforced; otherwise a valid (0..=2) `safesearch` URL parameter is
            // used, falling back to the configured level.
            let mut safe_search_level: u8 = match config.safe_search {
                3..=4 => config.safe_search,
                _ => match safe_search {
                    Some(safesearch) => match safesearch {
                        0..=2 => *safesearch,
                        _ => config.safe_search,
                    },
                    None => config.safe_search,
                },
            };
            // At the strictest safe search level, check the query against the filter
            // lists and return a "disallowed" result when it matches the blocklist and
            // is not explicitly permitted by the allowlist.
            if safe_search_level == 4 {
                let mut results: SearchResults = SearchResults::default();
                let blocked: bool =
                    is_match_from_filter_list(file_path(FileType::BlockList)?, query)?;
                let allowed: bool =
                    is_match_from_filter_list(file_path(FileType::AllowList)?, query)?;

                if blocked && !allowed {
                    results.set_disallowed();
                    results.add_style(&config.style);
                    results.set_page_query(query);
                    cache.cache_results(&results, &url).await?;
                    results.set_safe_search_level(safe_search_level);
                    return Ok(results);
                }
            }
            // Build fresh results, honouring the engine selection and safe search level
            // stored in the `appCookie` cookie when it is present.
            let mut results: SearchResults = match req.cookie("appCookie") {
                Some(cookie_value) => {
                    let cookie_value: Cookie<'_> =
                        serde_json::from_str(cookie_value.name_value().1)?;

                    let engines: Vec<EngineHandler> = cookie_value
                        .engines
                        .iter()
                        .filter_map(|name| EngineHandler::new(name))
                        .collect();

                    // Recompute the safe search level, this time falling back to the
                    // level stored in the cookie when no `safesearch` parameter was given.
                    safe_search_level = match config.safe_search {
                        3..=4 => config.safe_search,
                        _ => match safe_search {
                            Some(safesearch) => match safesearch {
                                0..=2 => *safesearch,
                                _ => config.safe_search,
                            },
                            None => cookie_value.safe_search_level,
                        },
                    };

                    match engines.is_empty() {
                        false => {
                            aggregate(
                                query,
                                page,
                                config.aggregator.random_delay,
                                config.debug,
                                &engines,
                                config.request_timeout,
                                safe_search_level,
                            )
                            .await?
                        }
                        true => {
                            let mut search_results = SearchResults::default();
                            search_results.set_no_engines_selected();
                            search_results.set_page_query(query);
                            search_results
                        }
                    }
                }
                // No cookie: aggregate from every configured upstream search engine.
                None => {
                    aggregate(
                        query,
                        page,
                        config.aggregator.random_delay,
                        config.debug,
                        &config.upstream_search_engines,
                        config.request_timeout,
                        safe_search_level,
                    )
                    .await?
                }
            };
            // No errors, no results, and engines were selected: the results were
            // most likely removed by the filters.
            if results.engine_errors_info().is_empty()
                && results.results().is_empty()
                && !results.no_engines_selected()
            {
                results.set_filtered();
            }
            results.add_style(&config.style);
            // Cache under the URL key with the safe search level appended.
            cache
                .cache_results(&results, &(format!("{url}{safe_search_level}")))
                .await?;
            results.set_safe_search_level(safe_search_level);
            Ok(results)
        }
    }
}
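
/// Checks whether the given query matches any of the regex patterns stored, one per
/// line, in the filter list file at `file_path`.
///
/// Returns `true` as soon as a pattern matches, `false` when none match, and an error
/// if the file cannot be opened or a line is not a valid regex.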
fn is_match_from_filter_list(
    file_path: &str,
    query: &str,
) -> Result<bool, Box<dyn std::error::Error>> {
    let mut flag = false;
    let mut reader = BufReader::new(File::open(file_path)?);
    // Each line of the filter file is treated as a regex pattern; stop at the first match.
    for line in reader.by_ref().lines() {
        let re = Regex::new(&line?)?;
        if re.is_match(query) {
            flag = true;
            break;
        }
    }
    Ok(flag)
}