Commit 2a4dd07 (unverified) · 1 parent: 193b4e3
Committed by Kekma, neon_arch, mergify[bot]

✨ feat: config option to configure the number of connections to have in the `reqwest` pool (#552)

* 🔧 Config: Added `number_of_https_connections` for the HTTPS connection pool.

* (feat): added the max idle pool per host option

* :memo: docs(config): add documentation back to the `http_adaptive_window_size` option (#532)

---------

Co-authored-by: neon_arch <[email protected]>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
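
Taken together, the changes thread one number from `websurfx/config.lua` into reqwest's connection pool. A minimal sketch of the resulting behaviour, assuming reqwest's standard `ClientBuilder` API as used in `src/results/aggregator.rs` below (the constant here stands in for the parsed `Config` field):

    use std::time::Duration;

    // Stand-in for Config::number_of_https_connections, normally parsed from config.lua.
    const NUMBER_OF_HTTPS_CONNECTIONS: u8 = 10;

    fn build_client() -> reqwest::Result<reqwest::Client> {
        reqwest::Client::builder()
            // New in this commit: cap how many idle connections the pool keeps per
            // upstream host, instead of reqwest's default of effectively unlimited.
            .pool_max_idle_per_host(NUMBER_OF_HTTPS_CONNECTIONS as usize)
            // Drop pooled connections that sit idle past the timeout.
            .pool_idle_timeout(Duration::from_secs(30))
            .https_only(true)
            .build()
    }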

src/cache/cacher.rs CHANGED

(Note: where a removed `-` line is followed by an identical `+` line in the hunks below, the two versions appear to differ only in leading whitespace, which this page's rendering strips.)

@@ -546,7 +546,7 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
-    /// cached results from the cache.
+    /// cached results from the cache.
     ///
     /// # Error
     ///
@@ -563,9 +563,9 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `search_results` - It takes the `SearchResults` as an argument which are results that
-    /// needs to be cached.
+    /// needs to be cached.
     /// * `url` - It takes the search url as an argument which will be used as the key for storing
-    /// results in the cache.
+    /// results in the cache.
     ///
     /// # Error
     ///
src/config/parser.rs CHANGED
@@ -48,6 +48,8 @@ pub struct Config {
     pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
+    /// It stores the number of https connections to keep in the pool.
+    pub number_of_https_connections: u8,
 }
 
 impl Config {
@@ -57,7 +59,7 @@ impl Config {
     /// # Arguments
     ///
     /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
-    /// initialized twice. Pass false if the logger has not yet been initialized.
+    /// initialized twice. Pass false if the logger has not yet been initialized.
     ///
     /// # Error
     ///
@@ -139,6 +141,7 @@ impl Config {
             request_timeout: globals.get::<_, u8>("request_timeout")?,
             tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
+            number_of_https_connections: globals.get::<_, u8>("number_of_https_connections")?,
             threads,
             client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
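
For context, `Config::parse` pulls these values out of the Lua globals table via mlua-style `get` calls, as the last hunk shows. A minimal standalone sketch of that pattern (the mlua crate is assumed, matching the `globals.get::<_, u8>(...)` turbofish in the diff; an inline Lua chunk stands in for websurfx's config file):

    use mlua::Lua;

    fn main() -> mlua::Result<()> {
        let lua = Lua::new();
        // websurfx evaluates websurfx/config.lua here; a literal chunk stands in for it.
        lua.load("number_of_https_connections = 10").exec()?;

        // The `u8` target type mirrors the new Config field, so the pool size tops out at 255.
        let pool_size: u8 = lua.globals().get("number_of_https_connections")?;
        println!("number_of_https_connections = {pool_size}");
        Ok(())
    }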
src/models/aggregation_models.rs CHANGED
@@ -11,7 +11,9 @@ use thesaurus::synonyms;
 /// A named struct to store the raw scraped search results scraped search results from the
 /// upstream search engines before aggregating it.It derives the Clone trait which is needed
 /// to write idiomatic rust using `Iterators`.
-/// (href url in html in simple words).
+///
+/// (href url in html in simple words).
+///
 #[derive(Clone, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
@@ -34,7 +36,7 @@ impl SearchResult {
     ///
     /// * `title` - The title of the search result.
     /// * `url` - The url which is accessed when clicked on it
-    /// (href url in html in simple words).
+    /// (href url in html in simple words).
     /// * `description` - The description of the search result.
     /// * `engine` - The names of the upstream engines from which this results were provided.
     pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {
@@ -124,7 +126,7 @@ impl EngineErrorInfo {
     /// # Arguments
     ///
     /// * `error` - It takes the error type which occured while fetching the result from a particular
-    /// search engine.
+    /// search engine.
     /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
     pub fn new(error: &EngineError, engine: &str) -> Self {
         Self {
@@ -177,11 +179,11 @@ impl SearchResults {
     /// # Arguments
     ///
     /// * `results` - Takes an argument of individual serializable `SearchResult` struct
-    /// and stores it into a vector of `SearchResult` structs.
+    /// and stores it into a vector of `SearchResult` structs.
     /// * `page_query` - Takes an argument of current page`s search query `q` provided in
-    /// the search url.
+    /// the search url.
     /// * `engine_errors_info` - Takes an array of structs which contains information regarding
-    /// which engines failed with their names, reason and their severity color name.
+    /// which engines failed with their names, reason and their severity color name.
     pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
         Self {
             results,
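
For reference, the constructor documented above is invoked along these lines (a hypothetical snippet with illustrative values; it assumes the crate's `SearchResult` type is in scope):

    // Matches the signature shown in the hunk:
    // pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self
    let result = SearchResult::new(
        "Example Domain",                // title
        "https://example.org",           // url (the href, in simple words)
        "An illustrative description.",  // description
        &["duckduckgo", "searx"],        // upstream engines that returned this result
    );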
src/models/parser_models.rs CHANGED
@@ -29,7 +29,7 @@ impl Style {
     ///
     /// * `theme` - It takes the parsed theme option used to set a theme for the website.
     /// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
-    /// for the theme being used.
+    /// for the theme being used.
     pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
         Style {
             theme,
src/results/aggregator.rs CHANGED
@@ -60,7 +60,7 @@ type FutureVec =
 /// * `debug` - Accepts a boolean value to enable or disable debug mode option.
 /// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the
 /// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
-/// user through the UI or the config file.
+/// user through the UI or the config file.
 ///
 /// # Error
 ///
@@ -81,6 +81,7 @@ pub async fn aggregate(
             config.pool_idle_connection_timeout as u64,
         ))
         .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
+        .pool_max_idle_per_host(config.number_of_https_connections as usize)
         .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
         .https_only(true)
         .gzip(true)
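
One design note on the new builder call: `pool_max_idle_per_host` caps only the idle connections reqwest keeps alive per upstream host, not the number of concurrent in-flight requests. As of reqwest 0.11 the default is effectively unlimited, and a value of 0 disables connection reuse altogether, so a small positive cap like this keeps a long-running instance from holding sockets open to every engine it has ever queried while still benefiting from keep-alive.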
src/server/routes/search.rs CHANGED
@@ -146,7 +146,7 @@ pub async fn search(
 /// # Arguments
 ///
 /// * `url` - It takes the url of the current page that requested the search results for a
-/// particular search query.
+/// particular search query.
 /// * `config` - It takes a parsed config struct.
 /// * `query` - It takes the page number as u32 value.
 /// * `req` - It takes the `HttpRequest` struct as a value.
src/templates/partials/search_bar.rs CHANGED
@@ -12,7 +12,7 @@ const SAFE_SEARCH_LEVELS_NAME: [&str; 3] = ["None", "Low", "Moderate"];
 /// # Arguments
 ///
 /// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream
-/// search engine which failed to provide results as an argument.
+/// search engine which failed to provide results as an argument.
 /// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument.
 /// * `query` - It takes the current search query provided by user as an argument.
 ///
src/templates/partials/settings_tabs/engines.rs CHANGED
@@ -9,7 +9,7 @@ use maud::{html, Markup};
 /// # Arguments
 ///
 /// * `engine_names` - It takes the key value pair list of all available engine names and there corresponding
-/// selected (enabled/disabled) value as an argument.
+/// selected (enabled/disabled) value as an argument.
 ///
 /// # Returns
 ///
src/templates/partials/settings_tabs/user_interface.rs CHANGED
@@ -11,9 +11,9 @@ use std::fs::read_dir;
 /// # Arguments
 ///
 /// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
-/// argument.
+/// argument.
 /// * `selected_style` - It takes the currently selected style value provided via the config file
-/// as an argument.
+/// as an argument.
 ///
 /// # Error
 ///
websurfx/config.lua CHANGED
@@ -18,6 +18,8 @@ rate_limiter = {
 }
 -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
+
+number_of_https_connections = 10 -- the number of https connections that should be available in the connection pool.
 -- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
 client_connection_keep_alive = 120
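
A tuning note: the value is parsed into a `u8` (see `src/config/parser.rs` above), so settings outside 0-255 in `config.lua` should fail config parsing rather than be clamped silently, and `0` would translate to `pool_max_idle_per_host(0)`, which disables connection reuse. The shipped default keeps 10 idle connections per upstream host.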