neon_arch committed
Commit: b170574
Parent(s): 09227d8
✨ feat: rename functions (#163)

Files changed:
- src/lib.rs +2 -2
- src/server/routes.rs +2 -2
src/lib.rs CHANGED

@@ -17,7 +17,7 @@ use actix_files as fs;
 use actix_web::{dev::Server, http::header, middleware::Logger, web, App, HttpServer};
 use config::parser::Config;
 use handlebars::Handlebars;
-use handler::public_paths::public_path;
+use handler::paths::{file_path, FileType};
 
 /// Runs the web server on the provided TCP listener and returns a `Server` instance.
 ///
@@ -42,7 +42,7 @@ use handler::public_paths::public_path;
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
     let mut handlebars: Handlebars = Handlebars::new();
 
-    let public_folder_path: String = public_path()?;
+    let public_folder_path: String = file_path(FileType::Theme)?;
 
     handlebars
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))
src/server/routes.rs CHANGED

@@ -8,7 +8,7 @@ use crate::{
     cache::cacher::RedisCache,
     config::parser::Config,
     engines::engine_models::EngineHandler,
-    handler::public_paths::public_path,
+    handler::paths::{file_path, FileType},
     results::{aggregation_models::SearchResults, aggregator::aggregate},
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
@@ -215,7 +215,7 @@ async fn results(
 /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
 #[get("/robots.txt")]
 pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
+    let page_content: String = read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
     Ok(HttpResponse::Ok()
         .content_type("text/plain; charset=ascii")
         .body(page_content))
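
Both call sites now go through the same renamed helper, handler::paths::file_path, which takes a FileType variant and resolves the matching folder on disk. The module itself is not part of this commit, so the following is only a minimal sketch of the shape the call sites imply (file_path returning std::io::Result<String>, FileType carrying at least a Theme variant); the real variant list, search paths, and error handling live in src/handler/paths.rs and may differ:

// Hypothetical sketch of the renamed helper; everything beyond the names
// file_path, FileType, and FileType::Theme is assumed, not taken from this diff.
use std::io::{Error, ErrorKind};
use std::path::Path;

/// Kinds of resources the handler can locate; only Theme is exercised in this commit.
pub enum FileType {
    Theme,
}

/// Resolve the folder for the requested file type, returning an io::Error so the
/// `?` in `run` (std::io::Result) and `robots_data` (Box<dyn Error>) can propagate it.
pub fn file_path(file_type: FileType) -> std::io::Result<String> {
    let candidate = match file_type {
        FileType::Theme => "./public", // assumed default location of the theme/public folder
    };
    if Path::new(candidate).exists() {
        Ok(candidate.to_owned())
    } else {
        Err(Error::new(ErrorKind::NotFound, "public folder not found"))
    }
}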