Kekma, neon_arch, and alamin655 committed
Commit c584a7d
1 Parent(s): ce4912b

✨ feat(config): config option to keep `tcp` connection alive for a certain period for subsequent requests (#548)

* Added a new HTTP connection setting to the reqwest::ClientBuilder so that the TCP connections used for fetching search results from the upstream search engines are kept alive.

* Provided a config option under the server section of the config to allow users to keep TCP connections alive for subsequent requests for a certain period of time (a minimal client-setup sketch follows the commit message).

* Update src/results/aggregator.rs

Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>

* Update src/results/aggregator.rs

Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>

* Fixed import issue in `tcp_connection_keepalive`

* Updated the size to `u8`

Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>

* Fixed size error in `parser.rs`

---------

Co-authored-by: neon_arch <mustafadhuleb53@gmail.com>
Co-authored-by: alamin655 <129589283+alamin655@users.noreply.github.com>
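
For context, here is a minimal, self-contained sketch of how the new option ends up on reqwest's ClientBuilder. This is not the project's code: the `AppConfig` struct and `build_client` function are assumptions made for illustration, while `timeout`, `connect_timeout`, and `tcp_keepalive` mirror the builder calls in the aggregator diff below.

    use std::time::Duration;
    use reqwest::ClientBuilder;

    // Hypothetical stand-in for the parsed config; the real project reads these
    // values from websurfx/config.lua via its config parser.
    struct AppConfig {
        request_timeout: u8,          // seconds
        tcp_connection_keepalive: u8, // seconds (the new option)
    }

    fn build_client(config: &AppConfig) -> reqwest::Result<reqwest::Client> {
        ClientBuilder::new()
            // Abort a search request if it takes longer than the configured timeout.
            .timeout(Duration::from_secs(config.request_timeout as u64))
            // Send TCP keepalive probes so pooled connections stay alive for
            // subsequent requests to the upstream engines.
            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
            .connect_timeout(Duration::from_secs(config.request_timeout as u64))
            .build()
    }

Keeping the value a `u8` (per the `u8` note above) matches the parser's `globals.get::<_, u8>("tcp_connection_keepalive")` call and caps the keepalive at 255 seconds.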

src/config/parser.rs CHANGED
@@ -42,6 +42,8 @@ pub struct Config {
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
+    /// It stores the TCP connection keepalive duration in seconds.
+    pub tcp_connection_keepalive: u8,
 }
 
 impl Config {
@@ -131,6 +133,7 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
+            tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
             threads,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
src/results/aggregator.rs CHANGED
@@ -77,6 +77,7 @@ pub async fn aggregate(
     let client = CLIENT.get_or_init(|| {
         ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
+            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .https_only(true)
             .gzip(true)
websurfx/config.lua CHANGED
@@ -10,6 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
+tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
     time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.