Update app.py
app.py CHANGED
@@ -3,25 +3,39 @@ import requests
 from bs4 import BeautifulSoup
 import re
 
-def
+def fetch_pdf_links():
     url = "https://finance.naver.com/research/company_list.naver"
     response = requests.get(url)
     soup = BeautifulSoup(response.text, 'html.parser')
 
-    #
-
-
-
-
-
-
-
+    # Find all PDF links on the page.
+    pdf_links = soup.find_all('a', href=re.compile("\.pdf$"))
+    links = []
+    for link in pdf_links:
+        full_url = f"https://finance.naver.com{link['href']}"
+        links.append(full_url)
+    return links
+
+def create_download_buttons(links):
+    items = []
+    for link in links:
+        # Create a download button for each PDF link.
+        items.append(gr.Button(value="Download", elem_id=link))
+    return items
 
 # Gradio interface
 with gr.Blocks() as app:
-    gr.
-
-
-
+    btn_fetch = gr.Button("PDF 링크 조회")
+    output_links = gr.Dataframe()
+    download_buttons = gr.Column()
+    btn_fetch.click(
+        fn=fetch_pdf_links,
+        outputs=output_links
+    )
+    output_links.change(
+        fn=create_download_buttons,
+        inputs=output_links,
+        outputs=download_buttons
+    )
 
 app.launch()
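
For orientation, here is a minimal, self-contained sketch of the fetch-and-display half of this change. The gradio and requests imports sit above the hunk and are assumed; the browser-style User-Agent header, the raise_for_status() call, the absolute-vs-relative href handling, and the row wrapping for gr.Dataframe are conveniences of this sketch, not part of the commit.

import re

import gradio as gr
import requests
from bs4 import BeautifulSoup

LIST_URL = "https://finance.naver.com/research/company_list.naver"

def fetch_pdf_links():
    # Fetch the research-report list page; a browser-like User-Agent is assumed
    # to be acceptable here, since the default requests UA is sometimes rejected.
    response = requests.get(LIST_URL, headers={"User-Agent": "Mozilla/5.0"})
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")

    # Collect every anchor whose href ends in ".pdf"; keep absolute URLs as-is
    # and resolve site-relative ones against finance.naver.com.
    links = []
    for a in soup.find_all("a", href=re.compile(r"\.pdf$")):
        href = a["href"]
        links.append(href if href.startswith("http") else f"https://finance.naver.com{href}")
    return links

with gr.Blocks() as app:
    btn_fetch = gr.Button("PDF 링크 조회")  # "Fetch PDF links"
    output_links = gr.Dataframe(headers=["PDF URL"])

    # gr.Dataframe expects rows, so each URL goes into its own single-column row.
    btn_fetch.click(fn=lambda: [[u] for u in fetch_pdf_links()], outputs=output_links)

app.launch()

The lambda only reshapes the return value into table rows; the scraping itself mirrors fetch_pdf_links from the diff.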
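
The create_download_buttons half builds gr.Button components inside an event handler and targets a gr.Column output; whether that actually adds buttons to the page depends on the Gradio version in use. Recent Gradio releases expose a dynamic-rendering decorator, gr.render, which is the documented way to rebuild part of the UI from data, so a sketch under that assumption follows. The gr.State wiring, the stub fetcher, and the link= parameter are choices of this sketch, not part of the commit.

import gradio as gr

def fetch_pdf_links():
    # Stand-in for the scraper above; returns a plain list of PDF URLs.
    return ["https://finance.naver.com/example-report.pdf"]

with gr.Blocks() as app:
    links_state = gr.State([])               # holds the fetched URLs
    btn_fetch = gr.Button("PDF 링크 조회")     # "Fetch PDF links"
    btn_fetch.click(fn=fetch_pdf_links, outputs=links_state)

    # Re-runs whenever links_state changes and rebuilds one button per link.
    @gr.render(inputs=links_state)
    def show_download_buttons(links):
        for link in links:
            gr.Button(value="Download", link=link)

app.launch()

With link set, each rendered button simply navigates to its PDF URL, so no separate download handler is needed.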