import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta
from bs4 import BeautifulSoup  # used to extract text from web pages
from huggingface_hub import InferenceClient  # needed for the LLM calls

# Install the required packages (uncomment and run if needed)
# !pip install bs4 huggingface_hub

# Read the API keys from environment variables (API keys must be kept secret)
API_KEY = os.getenv("SERPHOUSE_API_KEY")  # set your SerpHouse API key as an environment variable
HF_TOKEN = os.getenv("HF_TOKEN")  # set your Hugging Face API token as an environment variable
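# Optional sanity check (not part of the original flow): warn early if either
# key is missing, since every request below depends on one of them.
if not API_KEY or not HF_TOKEN:
    print("Warning: SERPHOUSE_API_KEY and/or HF_TOKEN is not set; API calls will fail.")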
MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany",
    "France", "Japan", "South Korea", "China", "India",
    "Brazil", "Mexico", "Russia", "Italy", "Spain",
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
    "Hungary", "Greece", "Portugal", "Argentina", "Chile",
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]
def search_serphouse(query, country, page=1, num_result=100):
    url = "https://api.serphouse.com/serp/live"

    # Restrict results to the last 24 hours.
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": date_range
        }
    }

    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    response = None  # keep a handle so the error path below can inspect the body
    try:
        response = requests.post(url, json=payload, headers=headers, timeout=30)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        error_msg = f"Error: {str(e)}"
        # response stays None when the request itself failed (e.g. a connection error)
        if response is not None and response.text:
            error_msg += f"\nResponse content: {response.text}"
        return {"error": error_msg}
def format_results_from_raw(results):
    try:
        if isinstance(results, dict) and "error" in results:
            return "Error: " + results["error"], []
        if not isinstance(results, dict):
            raise ValueError("Results are not in dictionary format.")

        # Unwrap the nested 'results' structure and pull out the 'news' list.
        news_results = []
        if 'results' in results:
            results_content = results['results']
            if 'results' in results_content:
                results_content = results_content['results']
            if 'news' in results_content:
                news_results = results_content['news']

        if not news_results:
            return "No search results found.", []

        articles = []
        for idx, result in enumerate(news_results, 1):
            title = result.get("title", "No title")
            link = result.get("url", result.get("link", "#"))
            snippet = result.get("snippet", "No content")
            channel = result.get("channel", result.get("source", "Unknown"))
            time = result.get("time", result.get("date", "Unknown time"))
            image_url = result.get("img", result.get("thumbnail", ""))
            articles.append({
                "index": idx,
                "title": title,
                "link": link,
                "snippet": snippet,
                "channel": channel,
                "time": time,
                "image_url": image_url
            })
        return "", articles
    except Exception as e:
        return f"Error: an exception occurred while processing results: {str(e)}", []
def serphouse_search(query, country):
    # Default page and result count.
    page = 1
    num_result = 100
    results = search_serphouse(query, country, page, num_result)
    error_message, articles = format_results_from_raw(results)
    return error_message, articles

# LLM setup
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
def summarize_article(url):
    try:
        # Extract text from the web page.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Collect all paragraph text (a simple heuristic).
        text = ' '.join([p.get_text() for p in soup.find_all('p')])
        if not text.strip():
            return "Could not retrieve the article content."
        # Rough cap so very long articles do not blow past the model's context window.
        text = text[:8000]
        # Generate the summary.
        prompt = f"Summarize the following English article in Korean, in three sentences:\n{text}"
        summary = hf_client.text_generation(prompt, max_new_tokens=500)
        return summary
    except Exception as e:
        return f"Error during summarization: {str(e)}"
css = """ | |
footer { | |
visibility: hidden; | |
} | |
""" | |
# Gradio interface
with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI Service") as iface:
    gr.Markdown("Enter a search term and select a country: the app lists up to 100 news articles from the last 24 hours that match the query.")
    with gr.Column():
        with gr.Row():
            query = gr.Textbox(label="Search term")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="Country", value="South Korea")
        search_button = gr.Button("Search")

        output_table = gr.HTML()
        summary_output = gr.Markdown(visible=False)
    def search_and_display(query, country):
        error_message, articles = serphouse_search(query, country)
        if error_message:
            return f"<p>{error_message}</p>", gr.update(visible=False)
        else:
            # Render the article list as an HTML table.
            table_html = "<table border='1' style='width:100%; text-align:left;'><tr><th>No.</th><th>Title</th><th>Source</th><th>Time</th><th>Analyze</th></tr>"
            for article in articles:
                # Each row gets a button that hands the article URL to the hidden analyze controls.
                analyze_button = f"""<button onclick="analyzeArticle('{article['link']}')">Analyze</button>"""
                row = f"""
                <tr>
                    <td>{article['index']}</td>
                    <td><a href="{article['link']}" target="_blank">{article['title']}</a></td>
                    <td>{article['channel']}</td>
                    <td>{article['time']}</td>
                    <td>{analyze_button}</td>
                </tr>
                """
                table_html += row
            table_html += "</table>"

            # JavaScript bridge: copy the URL into the hidden textbox, fire an
            # 'input' event so Gradio registers the programmatic change, then
            # click the hidden button to run analyze_article on the Python side.
            # Note: <script> tags injected through gr.HTML may not execute in
            # every Gradio version; if so, move this function into the Blocks
            # js/head parameters instead.
            js_code = """
            <script>
            function analyzeArticle(url) {
                const textarea = document.querySelector('#article_url_input textarea');
                textarea.value = url;
                textarea.dispatchEvent(new Event('input', { bubbles: true }));
                document.querySelector('#analyze_button').click();
            }
            </script>
            """
            full_html = table_html + js_code
            return full_html, gr.update(visible=True, value="")  # reset summary_output
    def analyze_article(url):
        summary = summarize_article(url)
        return summary

    # Hidden controls driven by the injected JavaScript above.
    article_url_input = gr.Textbox(visible=False, elem_id="article_url_input")
    analyze_button = gr.Button("Analyze", visible=False, elem_id="analyze_button")

    search_button.click(
        search_and_display,
        inputs=[query, country],
        outputs=[output_table, summary_output]
    )

    analyze_button.click(
        analyze_article,
        inputs=[article_url_input],
        outputs=[summary_output]
    )

iface.launch(auth=("gini", "pick"))