ddotthomas committed
✨ feat(config): option to keep the `websurfx` server connection alive for a certain period for subsequent requests (#568)
Files changed:
- src/config/parser.rs +5 -2
- src/lib.rs +5 -1
- src/results/aggregator.rs +1 -1
- websurfx/config.lua +3 -1

src/config/parser.rs (CHANGED)

@@ -37,13 +37,15 @@ pub struct Config {
     pub request_timeout: u8,
     /// It stores the number of threads which controls the app will use to run.
     pub threads: u8,
+    /// Set the keep-alive time for client connections to the HTTP server
+    pub client_connection_keep_alive: u8,
     /// It stores configuration options for the ratelimiting middleware.
     pub rate_limiter: RateLimiter,
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
     /// It stores the TCP connection keepalive duration in seconds.
-    pub
+    pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
 }

@@ -135,9 +137,10 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
-
+            tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
             threads,
+            client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
                 time_limit: rate_limiter["time_limit"],

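For reference, a minimal standalone sketch of how options like these can be read from Lua globals with mlua, following the same getter pattern as the Config::parse change above. The literal Lua chunk and the main function are illustrative assumptions, not the websurfx parser itself (websurfx loads websurfx/config.lua from disk):

use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    // A literal chunk keeps this example self-contained.
    lua.load(
        r#"
        tcp_connection_keep_alive = 30
        client_connection_keep_alive = 120
        "#,
    )
    .exec()?;

    let globals = lua.globals();
    // Same getter pattern as in Config::parse above.
    let tcp_keep_alive = globals.get::<_, u8>("tcp_connection_keep_alive")?;
    let client_keep_alive = globals.get::<_, u8>("client_connection_keep_alive")?;
    println!("tcp: {tcp_keep_alive}s, client: {client_keep_alive}s");
    Ok(())
}
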
src/lib.rs (CHANGED)

@@ -14,7 +14,7 @@ pub mod results;
 pub mod server;
 pub mod templates;

-use std::{net::TcpListener, sync::OnceLock};
+use std::{net::TcpListener, sync::OnceLock, time::Duration};

 use crate::server::router;

@@ -113,6 +113,10 @@ pub fn run(
             .default_service(web::route().to(router::not_found)) // error page
     })
     .workers(config.threads as usize)
+    // Set the keep-alive timer for client connections
+    .keep_alive(Duration::from_secs(
+        config.client_connection_keep_alive as u64,
+    ))
     // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
     .listen(listener)?
     .run();

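The server-side half of the change uses actix-web's HttpServer::keep_alive, which accepts a Duration for how long idle client connections stay open. A minimal sketch, assuming actix-web 4, a hard-coded value standing in for config.client_connection_keep_alive, and a placeholder handler:

use std::time::Duration;

use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // In websurfx this value comes from config.lua (client_connection_keep_alive).
    let client_connection_keep_alive: u8 = 120;

    HttpServer::new(|| {
        App::new().route("/", web::get().to(|| async { HttpResponse::Ok().body("ok") }))
    })
    // Keep idle client connections open for this many seconds before the server closes them.
    .keep_alive(Duration::from_secs(client_connection_keep_alive as u64))
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
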
src/results/aggregator.rs (CHANGED)

@@ -81,7 +81,7 @@ pub async fn aggregate(
         .pool_idle_timeout(Duration::from_secs(
             config.pool_idle_connection_timeout as u64,
         ))
-        .tcp_keepalive(Duration::from_secs(config.
+        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
         .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
         .https_only(true)
         .gzip(true)

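The upstream half goes through reqwest's ClientBuilder, where tcp_keepalive controls TCP keep-alive probes on pooled connections to the search engines. A minimal sketch with assumed constants standing in for the Config fields; the extra builder options used in websurfx (https_only, gzip, and so on) are omitted here to stay feature-independent:

use std::time::Duration;

use reqwest::Client;

fn build_client() -> reqwest::Result<Client> {
    // Stand-ins for config.tcp_connection_keep_alive, config.pool_idle_connection_timeout,
    // and config.request_timeout.
    let tcp_connection_keep_alive: u8 = 30;
    let pool_idle_connection_timeout: u8 = 30;
    let request_timeout: u8 = 30;

    Client::builder()
        // Drop pooled connections that have sat idle for this long.
        .pool_idle_timeout(Duration::from_secs(pool_idle_connection_timeout as u64))
        // Send TCP keep-alive probes so connections to upstream engines can be reused.
        .tcp_keepalive(Duration::from_secs(tcp_connection_keep_alive as u64))
        .connect_timeout(Duration::from_secs(request_timeout as u64))
        .build()
}
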
websurfx/config.lua (CHANGED)

@@ -10,7 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-
+tcp_connection_keep_alive = 30 -- the amount of time the tcp connection should remain alive to the upstream search engines (or connected to the server). (value in seconds).
 pool_idle_connection_timeout = 30 -- timeout for the idle connections in the reqwest HTTP connection pool (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.

@@ -18,6 +18,8 @@ rate_limiter = {
 }
 -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
+-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
+client_connection_keep_alive = 120

 -- ### Search ###
 -- Filter results based on different levels. The levels provided are: