Zsombor and mergify[bot] committed
Commit ca6b271 · unverified · 1 Parent(s): ab126b9

Add Wikipedia as a search engine (#633)


Currently it only searches the English Wikipedia, but it can be
customized to use other language editions.

Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
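As a sketch of that customization (hypothetical call site, not part of this commit; `Wikipedia::new` is defined in src/engines/wikipedia.rs below), another language edition only needs its subdomain code:

    // Hypothetical sketch: `Wikipedia::new` builds the host from the language
    // code, so this would target https://de.wikipedia.org and tag results with
    // the id "wikipedia-de".
    let engine = crate::engines::wikipedia::Wikipedia::new("de")?;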

src/engines/bing.rs CHANGED
@@ -15,6 +15,7 @@ use crate::models::engine_models::{EngineError, SearchEngine};
 
 use error_stack::{Report, Result, ResultExt};
 
+use super::common::build_cookie;
 use super::search_result_parser::SearchResultParser;
 
 /// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -73,10 +74,7 @@ impl SearchEngine for Bing {
             ("_UR=QS=0&TQS", "0"),
         ];
 
-        let mut cookie_string = String::new();
-        for (k, v) in &query_params {
-            cookie_string.push_str(&format!("{k}={v}; "));
-        }
+        let cookie_string = build_cookie(&query_params);
 
         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),
src/engines/common.rs ADDED
@@ -0,0 +1,23 @@
+//! This module provides common functionalities for engines
+
+/**
+ * Build a query from a list of key value pairs.
+ */
+pub fn build_query(query_params: &[(&str, &str)]) -> String {
+    let mut query_params_string = String::new();
+    for (k, v) in query_params {
+        query_params_string.push_str(&format!("&{k}={v}"));
+    }
+    query_params_string
+}
+
+/**
+ * Build a cookie from a list of key value pairs.
+ */
+pub fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
+    let mut cookie_string = String::new();
+    for (k, v) in cookie_params {
+        cookie_string.push_str(&format!("{k}={v}; "));
+    }
+    cookie_string
+}
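For reference, a hypothetical check (not part of the commit) of what these helpers return:

    // build_query prefixes every pair with '&'; build_cookie joins pairs with
    // "; " and leaves a trailing separator.
    let params = [("q", "rust"), ("page", "2")];
    assert_eq!(build_query(&params), "&q=rust&page=2");
    assert_eq!(build_cookie(&params), "q=rust; page=2; ");

Neither helper percent-encodes keys or values; pairs are concatenated verbatim.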
src/engines/mod.rs CHANGED
@@ -5,9 +5,11 @@
 
 pub mod bing;
 pub mod brave;
+pub mod common;
 pub mod duckduckgo;
 pub mod librex;
 pub mod mojeek;
 pub mod search_result_parser;
 pub mod searx;
 pub mod startpage;
+pub mod wikipedia;
src/engines/mojeek.rs CHANGED
@@ -14,6 +14,7 @@ use crate::models::engine_models::{EngineError, SearchEngine};
 
 use error_stack::{Report, Result, ResultExt};
 
+use super::common::{build_cookie, build_query};
 use super::search_result_parser::SearchResultParser;
 
 /// A new Mojeek engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -107,10 +108,7 @@ impl SearchEngine for Mojeek {
             ("safe", &safe),
         ];
 
-        let mut query_params_string = String::new();
-        for (k, v) in &query_params {
-            query_params_string.push_str(&format!("&{k}={v}"));
-        }
+        let query_params_string = build_query(&query_params);
 
         let url: String = match page {
             0 => {
@@ -123,10 +121,7 @@ impl SearchEngine for Mojeek {
             }
         };
 
-        let mut cookie_string = String::new();
-        for (k, v) in &query_params {
-            cookie_string.push_str(&format!("{k}={v}; "));
-        }
+        let cookie_string = build_cookie(&query_params);
 
         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),
src/engines/wikipedia.rs ADDED
@@ -0,0 +1,101 @@
+//! The `wikipedia` module handles the scraping of results from wikipedia
+//! with user provided query and with a page number if provided.
+
+use std::collections::HashMap;
+
+use reqwest::header::HeaderMap;
+use reqwest::Client;
+use scraper::Html;
+
+use crate::models::aggregation_models::SearchResult;
+
+use crate::models::engine_models::{EngineError, SearchEngine};
+
+use error_stack::{Report, Result, ResultExt};
+
+use super::common::build_query;
+use super::search_result_parser::SearchResultParser;
+
+/// A new Wikipedia engine type defined in-order to implement the `SearchEngine` trait which allows to
+/// reduce code duplication as well as allows to create vector of different search engines easily.
+pub struct Wikipedia {
+    /// The parser, used to interpret the search result.
+    parser: SearchResultParser,
+    /// The id of the engine, equals to 'wikipedia-' + language
+    id: String,
+    /// The host where wikipedia can be accessed.
+    host: String,
+}
+
+impl Wikipedia {
+    /// Creates the Wikipedia parser.
+    pub fn new(language: &str) -> Result<Self, EngineError> {
+        let host = format!("https://{}.wikipedia.org", &language);
+        let id = format!("wikipedia-{}", &language);
+        Ok(Self {
+            parser: SearchResultParser::new(
+                "p.mw-search-nonefound",
+                ".mw-search-results li.mw-search-result",
+                ".mw-search-result-heading a",
+                ".mw-search-result-heading a",
+                ".searchresult",
+            )?,
+            id,
+            host,
+        })
+    }
+}
+
+#[async_trait::async_trait]
+impl SearchEngine for Wikipedia {
+    async fn results(
+        &self,
+        query: &str,
+        page: u32,
+        user_agent: &str,
+        client: &Client,
+        _safe_search: u8,
+    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
+        let header_map = HeaderMap::try_from(&HashMap::from([
+            ("User-Agent".to_string(), user_agent.to_string()),
+            ("Referer".to_string(), self.host.to_string()),
+        ]))
+        .change_context(EngineError::UnexpectedError)?;
+
+        let offset = (page * 20).to_string();
+        let query_params: Vec<(&str, &str)> = vec![
+            ("limit", "20"),
+            ("offset", &offset),
+            ("profile", "default"),
+            ("search", query),
+            ("title", "Special:Search"),
+            ("ns0", "1"),
+        ];
+
+        let query_params_string = build_query(&query_params);
+
+        let url: String = format!("{}/w/index.php?{}", self.host, query_params_string);
+
+        let document: Html = Html::parse_document(
+            &Wikipedia::fetch_html_from_upstream(self, &url, header_map, client).await?,
+        );
+
+        if self.parser.parse_for_no_results(&document).next().is_some() {
+            return Err(Report::new(EngineError::EmptyResultSet));
+        }
+
+        // scrape all the results from the html
+        self.parser
+            .parse_for_results(&document, |title, url, desc| {
+                let found_url = url.attr("href");
+                found_url.map(|relative_url| {
+                    SearchResult::new(
+                        title.inner_html().trim(),
+                        &format!("{}{relative_url}", self.host),
+                        desc.inner_html().trim(),
+                        &[&self.id],
+                    )
+                })
+            })
+    }
+}
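To make the request construction concrete, here is a hypothetical trace (illustrative values, not from the commit) for the English host with query "rust" and page 1:

    // offset = (1 * 20) = 20, and build_query prefixes each pair with '&',
    // so the fetched URL is (the leading "?&" is redundant but harmless):
    // https://en.wikipedia.org/w/index.php?&limit=20&offset=20&profile=default&search=rust&title=Special:Search&ns0=1
    let url = format!(
        "{}/w/index.php?{}",
        "https://en.wikipedia.org",
        build_query(&[
            ("limit", "20"),
            ("offset", "20"),
            ("profile", "default"),
            ("search", "rust"),
            ("title", "Special:Search"),
            ("ns0", "1"),
        ])
    );

Since build_query does no URL-encoding, the user query is concatenated into the URL as-is.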
src/models/engine_models.rs CHANGED
@@ -206,6 +206,10 @@ impl EngineHandler {
             let engine = crate::engines::bing::Bing::new()?;
             ("bing", Box::new(engine))
         }
+        "wikipedia" => {
+            let engine = crate::engines::wikipedia::Wikipedia::new("en")?;
+            ("wikipedia", Box::new(engine))
+        }
         _ => {
             return Err(Report::from(EngineError::NoSuchEngineFound(
                 engine_name.to_string(),
websurfx/config.lua CHANGED
@@ -76,6 +76,7 @@ upstream_search_engines = {
     LibreX = false,
     Mojeek = false,
     Bing = false,
+    Wikipedia = true,
 } -- select the upstream search engines from which the results should be fetched.
 
 proxy = nil -- Proxy to send outgoing requests through. Set to nil to disable.