ptrdvn committed
Commit 198eca9 · verified · 1 Parent(s): 1fe03d2

Update README.md

Files changed (1)
  1. README.md +72 -0
README.md CHANGED
@@ -17,3 +17,75 @@ configs:
  - split: train
    path: data/train-*
---

Japanese construction-themed FAQs scraped from [https://www.city.yokohama.lg.jp/business/bunyabetsu/kenchiku/annai/faq/qa.html](https://www.city.yokohama.lg.jp/business/bunyabetsu/kenchiku/annai/faq/qa.html).

Downloaded using the following code:

```python
import requests
from lxml import html
import pandas as pd
from datasets import Dataset

hrefs = [
    "/business/bunyabetsu/kenchiku/annai/faq/ji-annnai.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/kakunin/qa-kakunin.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/teikihoukoku/seido/01.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/teikihoukoku/seido/07.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/doro/qa-doro.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/doro/qa-doro.html",
    "/business/bunyabetsu/kenchiku/bosai/kyoai/jigyou/qanda.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/kyoka/43.html",
    "/business/bunyabetsu/kenchiku/takuchi/toiawase/keikakuho/tokeihou.html",
    "/business/bunyabetsu/kenchiku/takuchi/toiawase/kiseiho/takuzo.html",
    "/business/bunyabetsu/kenchiku/takuchi/toiawase/keikakuho/q4-1.html",
    "/business/bunyabetsu/kenchiku/kankyo-shoene/casbee/hairyo/qa.html",
    "/business/bunyabetsu/kenchiku/tetsuduki/jorei/machizukuri/fukumachiqa.html",
    "/business/bunyabetsu/kenchiku/kankyo-shoene/chouki/qa-chouki.html",
    "/business/bunyabetsu/kenchiku/kankyo-shoene/huuti/qa-huuchi.html",
    "/kurashi/machizukuri-kankyo/kotsu/toshikotsu/chushajo/jorei/qa.html",
]

url_stem = "https://www.city.yokohama.lg.jp"

def get_question_text(url):
    # Send a GET request to the webpage
    response = requests.get(url)

    # Parse the HTML content
    tree = html.fromstring(response.content)

    question_data = []

    # Use XPath to find the desired elements
    for qa_element in tree.xpath('//div[@class="contents-area"]/section'):
        question_data.append({
            "question": qa_element.xpath('.//div[@class="question-text"]/text()')[0],
            "answer": "\n".join(qa_element.xpath('.//div[@class="answer-text"]/div/p/text()'))
        })

    return question_data

qa_list = []
for href in hrefs:
    print(href)
    qa_list.extend(get_question_text(url_stem + href))

df = pd.DataFrame(qa_list)

# Drop the leading label (everything before the first whitespace) when a
# full-width or half-width space appears within the first 7 characters
df.question = df.question.apply(lambda x: x[len(x.split()[0]):] if "　" in x[:7] or " " in x[:7] else x)
df.answer = df.answer.apply(lambda x: x[len(x.split()[0]):] if "　" in x[:7] or " " in x[:7] else x)

df.question = df.question.str.strip()
df.answer = df.answer.str.strip()

# Cut the string at the last "<", removing the "<" and everything after it
df.question = df.question.apply(lambda x: x[:-len(x.split("<")[-1])-1] if "<" in x else x)
df.answer = df.answer.apply(lambda x: x[:-len(x.split("<")[-1])-1] if "<" in x else x)

df.question = df.question.str.strip()
df.answer = df.answer.str.strip()

Dataset.from_pandas(df).push_to_hub("lightblue/architecture_faqs")
```
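
The XPath expressions above imply a page structure of `div.contents-area > section` blocks, each holding a `question-text` div and `answer-text > div > p` paragraphs. As a rough, self-contained illustration (the HTML fragment below is made up for the example, not copied from the real pages), the same extraction logic can be exercised offline:

```python
from lxml import html

# Made-up fragment mirroring the structure implied by the XPaths above
sample = """
<html><body>
<div class="contents-area">
  <section>
    <div class="question-text">Q1 Is a building permit required?</div>
    <div class="answer-text"><div><p>Yes, in most cases.</p><p>See the city guidance pages.</p></div></div>
  </section>
</div>
</body></html>
"""

tree = html.fromstring(sample)
for qa_element in tree.xpath('//div[@class="contents-area"]/section'):
    question = qa_element.xpath('.//div[@class="question-text"]/text()')[0]
    answer = "\n".join(qa_element.xpath('.//div[@class="answer-text"]/div/p/text()'))
    print(question)  # Q1 Is a building permit required?
    print(answer)    # Yes, in most cases. / See the city guidance pages.
```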
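
Once pushed, the dataset can be read back with the `datasets` library; a minimal usage sketch, assuming the single `train` split declared in the config above:

```python
from datasets import load_dataset

# Load the published dataset from the Hub
ds = load_dataset("lightblue/architecture_faqs", split="train")

print(len(ds))            # number of question/answer pairs
print(ds[0]["question"])  # first scraped question
print(ds[0]["answer"])    # its answer
```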