import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta, timezone
from concurrent.futures import ThreadPoolExecutor, as_completed
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from openai import OpenAI
import pathlib
import sqlite3
import pytz
# Companies/keywords to monitor (mostly US tech names plus finance topics)
KOREAN_COMPANIES = [
    "NVIDIA",
    "ALPHABET",
    "APPLE",
    "TESLA",
    "AMAZON",
    "MICROSOFT",
    "META",
    "INTEL",
    "SAMSUNG",
    "HYNIX",
    "BITCOIN",
    "crypto",
    "stock",
    "Economics",
    "Finance",
    "investing"
]
######################################################################
# Shared helper: time conversion
######################################################################
def convert_to_seoul_time(timestamp_str):
    """
    Takes a naive 'YYYY-MM-DD HH:MM:SS' timestamp (assumed to already be
    Seoul wall-clock time, as written by save_to_db) and returns it as
    'YYYY-MM-DD HH:MM:SS KST'. No offset conversion is performed.
    """
    try:
        dt = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
        seoul_tz = pytz.timezone('Asia/Seoul')
        seoul_time = seoul_tz.localize(dt)
        return seoul_time.strftime('%Y-%m-%d %H:%M:%S KST')
    except Exception as e:
        print(f"Time conversion error: {str(e)}")
        return timestamp_str
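
# Example (illustrative): timestamps written by save_to_db are already Seoul
# local time, so this helper only re-labels them:
#   convert_to_seoul_time("2024-01-01 09:30:00")  # -> "2024-01-01 09:30:00 KST"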
######################################################################
# Shared helper: sentiment analysis
######################################################################
def analyze_sentiment_batch(articles, client):
    """
    Runs one combined sentiment analysis over the titles + snippets of the
    given news articles via the OpenAI-compatible API.
    - The prompt explicitly asks for the result to be written in Korean.
    """
    try:
        # Combine each article's title and snippet into one text block
        combined_text = "\n\n".join([
            f"제목: {article.get('title', '')}\n내용: {article.get('snippet', '')}"
            for article in articles
        ])
        # Prompt instructing the model to answer in Korean
        prompt = f"""다음 뉴스 모음에 대해 전반적인 감성 분석을 수행하세요. (한국어로 작성하세요)

뉴스 내용:
{combined_text}

다음 형식으로 분석해주세요:
1. 전반적 감성: [긍정/부정/중립]
2. 주요 긍정적 요소:
   - [항목1]
   - [항목2]
3. 주요 부정적 요소:
   - [항목1]
   - [항목2]
4. 종합 평가: [상세 설명]
"""
        response = client.chat.completions.create(
            model="CohereForAI/c4ai-command-r-plus-08-2024",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.3,
            max_tokens=1000
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"감성 분석 실패: {str(e)}"
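
# The articles argument is expected to be a list of dicts shaped like those
# produced by format_results_from_raw further below, e.g. (illustrative):
#   [{"title": "...", "snippet": "...", "channel": "...", ...}, ...]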
######################################################################
# DB initialization and I/O
######################################################################
def init_db():
    """
    Creates the SQLite DB file (search_results.db) if it does not exist,
    along with the 'searches' table.
    """
    db_path = pathlib.Path("search_results.db")
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    c.execute('''CREATE TABLE IF NOT EXISTS searches
                 (id INTEGER PRIMARY KEY AUTOINCREMENT,
                  keyword TEXT,
                  country TEXT,
                  results TEXT,
                  timestamp DATETIME DEFAULT CURRENT_TIMESTAMP)''')
    conn.commit()
    conn.close()

def save_to_db(keyword, country, results):
    """
    Inserts the results (as JSON) for (keyword, country) into the DB,
    timestamped with the current Seoul time.
    """
    conn = sqlite3.connect("search_results.db")
    c = conn.cursor()
    seoul_tz = pytz.timezone('Asia/Seoul')
    now = datetime.now(seoul_tz)
    timestamp = now.strftime('%Y-%m-%d %H:%M:%S')
    c.execute("""INSERT INTO searches
                 (keyword, country, results, timestamp)
                 VALUES (?, ?, ?, ?)""",
              (keyword, country, json.dumps(results), timestamp))
    conn.commit()
    conn.close()

def load_from_db(keyword, country):
    """
    Loads the most recent record for (keyword, country) from the DB.
    - On success: (parsed results via json.loads, KST time string)
    - On failure: (None, None)
    """
    conn = sqlite3.connect("search_results.db")
    c = conn.cursor()
    c.execute("""SELECT results, timestamp
                 FROM searches
                 WHERE keyword=? AND country=?
                 ORDER BY timestamp DESC
                 LIMIT 1""",
              (keyword, country))
    row = c.fetchone()
    conn.close()
    if row:
        return json.loads(row[0]), convert_to_seoul_time(row[1])
    return None, None
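
# Example round trip (illustrative):
#   init_db()
#   save_to_db("AAPL", "United States", {"articles": [], "analysis": ""})
#   data, ts = load_from_db("AAPL", "United States")  # ts is a KST-labelled string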
######################################################################
# SerpHouse API (search functions)
######################################################################
API_KEY = os.getenv("SERPHOUSE_API_KEY")

def is_english(text):
    """
    Returns True if the text (ignoring spaces, hyphens, and underscores)
    is entirely within the ASCII range, else False.
    """
    return all(ord(char) < 128 for char in text.replace(' ', '').replace('-', '').replace('_', ''))
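
# Examples (illustrative):
#   is_english("Apple Inc.")  # -> True (all ASCII)
#   is_english("삼성전자")      # -> False (non-ASCII characters present)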
def translate_query(query, country):
    """
    Translates the query into the target country's language using the
    unofficial Google Translate web endpoint.
    """
    try:
        # Already English: return as-is
        if is_english(query):
            return query
        if country in COUNTRY_LANGUAGES:
            target_lang = COUNTRY_LANGUAGES[country]
            url = "https://translate.googleapis.com/translate_a/single"
            params = {
                "client": "gtx",
                "sl": "auto",
                "tl": target_lang,
                "dt": "t",
                "q": query
            }
            session = requests.Session()
            retries = Retry(total=3, backoff_factor=0.5)
            session.mount('https://', HTTPAdapter(max_retries=retries))
            resp = session.get(url, params=params, timeout=(5, 10))
            # Note: only the first translated segment is used, so long
            # multi-sentence inputs may come back truncated.
            translated_text = resp.json()[0][0][0]
            return translated_text
        return query
    except Exception as e:
        print(f"Translation error: {str(e)}")
        return query

def translate_to_korean(text):
    """
    Translates text (e.g. article snippets) into Korean via the same endpoint.
    """
    try:
        url = "https://translate.googleapis.com/translate_a/single"
        params = {
            "client": "gtx",
            "sl": "auto",
            "tl": "ko",
            "dt": "t",
            "q": text
        }
        session = requests.Session()
        retries = Retry(total=3, backoff_factor=0.5)
        session.mount('https://', HTTPAdapter(max_retries=retries))
        response = session.get(url, params=params, timeout=(5, 10))
        translated_text = response.json()[0][0][0]
        return translated_text
    except Exception as e:
        print(f"Korean translation error: {str(e)}")
        return text
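
# Both helpers fall back to the original text on any failure, so callers never
# have to handle a translation error themselves. Example (illustrative):
#   translate_to_korean("Stocks rallied on Friday.")  # -> Korean text, or the input on error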
def search_serphouse(query, country, page=1, num_result=10):
    """
    Live news search via the SerpHouse API (serp_type='news', sort_by=date),
    restricted to the last 24 hours.
    Note: num_result is currently unused; the payload requests 100 results.
    """
    url = "https://api.serphouse.com/serp/live"
    now = datetime.now(timezone.utc)
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
    translated_query = translate_query(query, country)
    payload = {
        "data": {
            "q": translated_query,
            "domain": "google.com",
            "loc": COUNTRY_LOCATIONS.get(country, "United States"),
            "lang": COUNTRY_LANGUAGES.get(country, "en"),
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "num": "100",
            "date_range": date_range,
            "sort_by": "date"
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }
    try:
        session = requests.Session()
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[429, 500, 502, 503, 504],
            allowed_methods=["POST"]
        )
        adapter = HTTPAdapter(max_retries=retries)
        session.mount('http://', adapter)
        session.mount('https://', adapter)
        resp = session.post(url, json=payload, headers=headers, timeout=(30, 30))
        resp.raise_for_status()
        # Response JSON
        return {
            "results": resp.json(),
            "translated_query": translated_query
        }
    except requests.exceptions.Timeout:
        return {
            "error": "검색 시간이 초과되었습니다. 잠시 후 다시 시도해주세요.",
            "translated_query": query
        }
    except requests.exceptions.RequestException as e:
        return {
            "error": f"검색 중 오류가 발생했습니다: {str(e)}",
            "translated_query": query
        }
    except Exception as e:
        return {
            "error": f"예기치 않은 오류가 발생했습니다: {str(e)}",
            "translated_query": query
        }
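
# A note on the retry policy above (my reading of urllib3's Retry semantics):
# with total=5, backoff_factor=1, and POST explicitly allowed, requests that
# fail with one of the listed status codes are retried with roughly
# exponential sleeps between attempts before the final exception propagates.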
def format_results_from_raw(response_data):
    """
    Shapes the SerpHouse API response into (error_message, articles_list).
    - Filters out Korean-domain/Korea-related articles
    - Returns "검색 결과가 없습니다." when the news list is empty
    """
    if "error" in response_data:
        return "Error: " + response_data["error"], []
    try:
        results = response_data["results"]
        translated_query = response_data["translated_query"]
        # The actual news-tab results
        news_results = results.get('results', {}).get('results', {}).get('news', [])
        if not news_results:
            return "검색 결과가 없습니다.", []
        # Exclude Korean sources
        korean_domains = [
            '.kr', 'korea', 'korean', 'yonhap', 'hankyung', 'chosun',
            'donga', 'joins', 'hani', 'koreatimes', 'koreaherald'
        ]
        korean_keywords = [
            'korea', 'korean', 'seoul', 'busan', 'incheon', 'daegu',
            'gwangju', 'daejeon', 'ulsan', 'sejong'
        ]
        filtered_articles = []
        for idx, result in enumerate(news_results, 1):
            url = result.get("url", result.get("link", "")).lower()
            title = result.get("title", "").lower()
            channel = result.get("channel", result.get("source", "")).lower()
            is_korean_content = (
                any(domain in url or domain in channel for domain in korean_domains)
                or any(keyword in title for keyword in korean_keywords)
            )
            if not is_korean_content:
                filtered_articles.append({
                    "index": idx,
                    "title": result.get("title", "제목 없음"),
                    "link": url,
                    "snippet": result.get("snippet", "내용 없음"),
                    "channel": result.get("channel", result.get("source", "알 수 없음")),
                    "time": result.get("time", result.get("date", "알 수 없는 시간")),
                    "image_url": result.get("img", result.get("thumbnail", "")),
                    "translated_query": translated_query
                })
        return "", filtered_articles
    except Exception as e:
        return f"결과 처리 중 오류 발생: {str(e)}", []
def serphouse_search(query, country):
    """
    Full pipeline (search_serphouse -> format_results_from_raw).
    Returns: (error_message, articles_list)
    """
    response_data = search_serphouse(query, country)
    return format_results_from_raw(response_data)
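
# Example (illustrative):
#   err, articles = serphouse_search("Apple", "United States")
#   if not err:
#       print(articles[0]["title"])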
######################################################################
# News article list -> Markdown
######################################################################
def display_results(articles):
    """
    Converts the article list into a Markdown string, showing the original
    snippet alongside its Korean translation (one translation request per
    article).
    """
    output = ""
    for idx, article in enumerate(articles, 1):
        # Translate the snippet into Korean
        korean_snippet = translate_to_korean(article['snippet'])
        output += f"### {idx}. {article['title']}\n"
        output += f"출처: {article['channel']}\n"
        output += f"시간: {article['time']}\n"
        output += f"링크: {article['link']}\n"
        output += f"요약(원문): {article['snippet']}\n"
        output += f"요약(한국어): {korean_snippet}\n\n"
    return output
######################################################################
# Search/output functions
######################################################################
def search_company(company):
    """
    Searches US news for the company -> sentiment analysis (in Korean)
    -> saves to DB -> returns Markdown.
    """
    error_message, articles = serphouse_search(company, "United States")
    if not error_message and articles:
        analysis = analyze_sentiment_batch(articles, client)
        data_to_store = {
            "articles": articles,
            "analysis": analysis
        }
        save_to_db(company, "United States", data_to_store)
        out = display_results(articles)
        out += f"\n\n### 분석 보고\n{analysis}\n"
        return out
    else:
        if error_message:
            return error_message
        return f"{company}에 대한 검색 결과가 없습니다."

def load_company(company):
    """
    Loads the stored (company, United States) result from the DB and
    renders the articles plus the analysis.
    """
    loaded, ts = load_from_db(company, "United States")
    if loaded:
        articles = loaded.get("articles", [])
        analysis = loaded.get("analysis", "")
        out = f"### {company} 검색 결과\n저장 시간: {ts}\n\n"
        out += display_results(articles)
        out += f"\n\n### 분석 보고\n{analysis}\n"
        return out
    return f"{company}에 대한 저장된 결과가 없습니다."
######################################################################
# Statistics (EarnBOT analysis report)
######################################################################
def show_stats():
    """
    For every entry in KOREAN_COMPANIES, loads the most recent DB record
    and reports article count, analysis, and timestamp.
    """
    conn = sqlite3.connect("search_results.db")
    c = conn.cursor()
    output = "## EarnBOT 분석 리포트\n\n"
    # Latest stored record per company
    data_list = []
    for comp in KOREAN_COMPANIES:
        c.execute("""
            SELECT results, timestamp
            FROM searches
            WHERE keyword=?
            ORDER BY timestamp DESC
            LIMIT 1
        """, (comp,))
        row = c.fetchone()
        if row:
            results_json, ts = row
            data_list.append((comp, ts, results_json))
    conn.close()

    def analyze_data(item):
        comp, tstamp, json_str = item
        data_obj = json.loads(json_str)
        articles = data_obj.get("articles", [])
        analysis = data_obj.get("analysis", "")
        count_articles = len(articles)
        return (comp, tstamp, count_articles, analysis)

    results_list = []
    with ThreadPoolExecutor(max_workers=5) as executor:
        futures = [executor.submit(analyze_data, dl) for dl in data_list]
        for future in as_completed(futures):
            results_list.append(future.result())
    # as_completed yields in completion order; sort back into list order
    # so the report is deterministic
    results_list.sort(key=lambda r: KOREAN_COMPANIES.index(r[0]))

    for comp, tstamp, count_articles, analysis in results_list:
        kst_time = convert_to_seoul_time(tstamp)
        output += f"### {comp}\n"
        output += f"- 마지막 업데이트: {kst_time}\n"
        output += f"- 저장된 기사 수: {count_articles}건\n\n"
        if analysis:
            output += "#### 뉴스 감성 분석\n"
            output += f"{analysis}\n\n"
        output += "---\n\n"
    return output
######################################################################
# Combined search + output + analysis
######################################################################
def search_all_companies():
    """
    Searches and analyzes all companies in parallel -> saves to DB
    -> returns combined Markdown.
    """
    overall = "# [전체 검색 결과]\n\n"

    def do_search(comp):
        return comp, search_company(comp)

    results = {}
    with ThreadPoolExecutor(max_workers=5) as executor:
        futures = [executor.submit(do_search, c) for c in KOREAN_COMPANIES]
        for future in as_completed(futures):
            comp, res = future.result()
            results[comp] = res
    # Emit sections in the fixed list order rather than completion order
    for comp in KOREAN_COMPANIES:
        overall += f"## {comp}\n"
        overall += results.get(comp, "") + "\n\n"
    return overall

def load_all_companies():
    """
    Loads every company from the DB -> articles + analysis.
    """
    overall = "# [전체 출력 결과]\n\n"
    for comp in KOREAN_COMPANIES:
        overall += f"## {comp}\n"
        overall += load_company(comp)
        overall += "\n"
    return overall
def full_summary_report():
    """
    1) Search + analyze everything => DB
    2) Load everything back from the DB
    3) Sentiment analysis statistics
    """
    search_text = search_all_companies()
    load_text = load_all_companies()
    stats_text = show_stats()
    combined = (
        "# 전체 분석 보고 요약\n\n"
        "아래 순서로 실행되었습니다:\n"
        "1. 모든 종목 검색(병렬) + 분석 => 2. 모든 종목 DB 결과 출력 => 3. 전체 감성 분석 통계\n\n"
        f"{search_text}\n\n"
        f"{load_text}\n\n"
        "## [전체 감성 분석 통계]\n\n"
        f"{stats_text}"
    )
    return combined
######################################################################
# Custom user search
######################################################################
def search_custom(query, country):
    """
    1) Searches + analyzes for query & country
    2) Saves to DB
    3) Reloads from DB -> renders articles + analysis
    """
    error_message, articles = serphouse_search(query, country)
    if error_message:
        return f"오류 발생: {error_message}"
    if not articles:
        return "검색 결과가 없습니다."
    analysis = analyze_sentiment_batch(articles, client)
    store = {
        "articles": articles,
        "analysis": analysis
    }
    save_to_db(query, country, store)
    loaded, ts = load_from_db(query, country)
    if not loaded:
        return "DB에서 로드 실패"
    arts = loaded.get("articles", [])
    analy = loaded.get("analysis", "")
    out = "## [사용자 임의 검색 결과]\n\n"
    out += f"**키워드**: {query}\n\n"
    out += f"**국가**: {country}\n\n"
    out += f"**저장 시간**: {ts}\n\n"
    out += display_results(arts)
    out += f"### 뉴스 감성 분석\n{analy}\n"
    return out
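
# Example (illustrative): called from the Gradio UI below as
#   search_custom("Apple", "United States")
# which searches, stores the result, reloads it from the DB, and returns Markdown.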
######################################################################
# Hugging Face OpenAI-compatible client
######################################################################
ACCESS_TOKEN = os.getenv("HF_TOKEN")
if not ACCESS_TOKEN:
    raise ValueError("HF_TOKEN environment variable is not set")

client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=ACCESS_TOKEN,
)
######################################################################
# Country settings
######################################################################
COUNTRY_LANGUAGES = {
    "United States": "en",
    "KOREA": "ko",
    "United Kingdom": "en",
    "Taiwan": "zh-TW",
    "Canada": "en",
    "Australia": "en",
    "Germany": "de",
    "France": "fr",
    "Japan": "ja",
    "China": "zh",
    "India": "hi",
    "Brazil": "pt",
    "Mexico": "es",
    "Russia": "ru",
    "Italy": "it",
    "Spain": "es",
    "Netherlands": "nl",
    "Singapore": "en",
    "Hong Kong": "zh-HK",
    "Indonesia": "id",
    "Malaysia": "ms",
    "Philippines": "tl",
    "Thailand": "th",
    "Vietnam": "vi",
    "Belgium": "nl",
    "Denmark": "da",
    "Finland": "fi",
    "Ireland": "en",
    "Norway": "no",
    "Poland": "pl",
    "Sweden": "sv",
    "Switzerland": "de",
    "Austria": "de",
    "Czech Republic": "cs",
    "Greece": "el",
    "Hungary": "hu",
    "Portugal": "pt",
    "Romania": "ro",
    "Turkey": "tr",
    "Israel": "he",
    "Saudi Arabia": "ar",
    "United Arab Emirates": "ar",
    "South Africa": "en",
    "Argentina": "es",
    "Chile": "es",
    "Colombia": "es",
    "Peru": "es",
    "Venezuela": "es",
    "New Zealand": "en",
    "Bangladesh": "bn",
    "Pakistan": "ur",
    "Egypt": "ar",
    "Morocco": "ar",
    "Nigeria": "en",
    "Kenya": "sw",
    "Ukraine": "uk",
    "Croatia": "hr",
    "Slovakia": "sk",
    "Bulgaria": "bg",
    "Serbia": "sr",
    "Estonia": "et",
    "Latvia": "lv",
    "Lithuania": "lt",
    "Slovenia": "sl",
    "Luxembourg": "lb",
    "Malta": "mt",
    "Cyprus": "el",
    "Iceland": "is"
}
COUNTRY_LOCATIONS = {
    "United States": "United States",
    "KOREA": "South Korea",
    "United Kingdom": "United Kingdom",
    "Taiwan": "Taiwan",
    "Canada": "Canada",
    "Australia": "Australia",
    "Germany": "Germany",
    "France": "France",
    "Japan": "Japan",
    "China": "China",
    "India": "India",
    "Brazil": "Brazil",
    "Mexico": "Mexico",
    "Russia": "Russia",
    "Italy": "Italy",
    "Spain": "Spain",
    "Netherlands": "Netherlands",
    "Singapore": "Singapore",
    "Hong Kong": "Hong Kong",
    "Indonesia": "Indonesia",
    "Malaysia": "Malaysia",
    "Philippines": "Philippines",
    "Thailand": "Thailand",
    "Vietnam": "Vietnam",
    "Belgium": "Belgium",
    "Denmark": "Denmark",
    "Finland": "Finland",
    "Ireland": "Ireland",
    "Norway": "Norway",
    "Poland": "Poland",
    "Sweden": "Sweden",
    "Switzerland": "Switzerland",
    "Austria": "Austria",
    "Czech Republic": "Czech Republic",
    "Greece": "Greece",
    "Hungary": "Hungary",
    "Portugal": "Portugal",
    "Romania": "Romania",
    "Turkey": "Turkey",
    "Israel": "Israel",
    "Saudi Arabia": "Saudi Arabia",
    "United Arab Emirates": "United Arab Emirates",
    "South Africa": "South Africa",
    "Argentina": "Argentina",
    "Chile": "Chile",
    "Colombia": "Colombia",
    "Peru": "Peru",
    "Venezuela": "Venezuela",
    "New Zealand": "New Zealand",
    "Bangladesh": "Bangladesh",
    "Pakistan": "Pakistan",
    "Egypt": "Egypt",
    "Morocco": "Morocco",
    "Nigeria": "Nigeria",
    "Kenya": "Kenya",
    "Ukraine": "Ukraine",
    "Croatia": "Croatia",
    "Slovakia": "Slovakia",
    "Bulgaria": "Bulgaria",
    "Serbia": "Serbia",
    "Estonia": "Estonia",
    "Latvia": "Latvia",
    "Lithuania": "Lithuania",
    "Slovenia": "Slovenia",
    "Luxembourg": "Luxembourg",
    "Malta": "Malta",
    "Cyprus": "Cyprus",
    "Iceland": "Iceland"
}
css = """
/* Global styles */
footer {visibility: hidden;}

/* Layout styles, tab styles, etc. */
#status_area {
    background: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-bottom: 1px solid #ddd;
    margin-bottom: 20px;
    box-shadow: 0 2px 5px rgba(0,0,0,0.1);
}
#results_area {
    padding: 10px;
    margin-top: 10px;
}
.tabs {
    border-bottom: 2px solid #ddd !important;
    margin-bottom: 20px !important;
}
.tab-nav {
    border-bottom: none !important;
    margin-bottom: 0 !important;
}
.tab-nav button {
    font-weight: bold !important;
    padding: 10px 20px !important;
}
.tab-nav button.selected {
    border-bottom: 2px solid #1f77b4 !important;
    color: #1f77b4 !important;
}

/* Status messages */
#status_area .markdown-text {
    font-size: 1.1em;
    color: #2c3e50;
    padding: 10px 0;
}
.group {
    border: 1px solid #eee;
    padding: 15px;
    margin-bottom: 15px;
    border-radius: 5px;
    background: white;
}

/* Button styles */
.primary-btn {
    background: #1f77b4 !important;
    border: none !important;
}

/* Misc ... */
"""
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css, title="NewsAI 서비스") as iface:
    # Initialize the DB
    init_db()

    with gr.Tabs():
        # First tab
        with gr.Tab("Earnbot"):
            gr.Markdown("## EarnBot: 글로벌 빅테크 기업 및 투자 전망 AI 자동 분석")
            gr.Markdown(
                " * '전체 분석 보고 요약' 클릭 시 전체 자동 보고 생성.\n"
                " * 아래 개별 종목의 '검색(DB 자동 저장)'과 '출력(DB 자동 호출)'도 가능.\n"
                " * 추가로, 원하는 임의 키워드 및 국가로 검색/분석할 수도 있습니다."
            )

            # Custom user search section
            with gr.Group():
                gr.Markdown("### 사용자 임의 검색")
                with gr.Row():
                    with gr.Column():
                        user_input = gr.Textbox(
                            label="검색어 입력",
                            placeholder="예) Apple, Samsung 등 자유롭게"
                        )
                    with gr.Column():
                        country_selection = gr.Dropdown(
                            choices=list(COUNTRY_LOCATIONS.keys()),
                            value="United States",
                            label="국가 선택"
                        )
                    with gr.Column():
                        custom_search_btn = gr.Button("실행", variant="primary")
                custom_search_output = gr.Markdown()
                # Custom search button click
                custom_search_btn.click(
                    fn=search_custom,
                    inputs=[user_input, country_selection],
                    outputs=custom_search_output
                )

            # Full analysis report button
            with gr.Row():
                full_report_btn = gr.Button("전체 분석 보고 요약", variant="primary")
            full_report_display = gr.Markdown()
            # Full report -> full_summary_report
            full_report_btn.click(
                fn=full_summary_report,
                outputs=full_report_display
            )

            # Fixed company list: search / load, two columns per row
            with gr.Column():
                for i in range(0, len(KOREAN_COMPANIES), 2):
                    with gr.Row():
                        # Left column
                        with gr.Column():
                            company = KOREAN_COMPANIES[i]
                            with gr.Group():
                                gr.Markdown(f"### {company}")
                                with gr.Row():
                                    search_btn = gr.Button("검색", variant="primary")
                                    load_btn = gr.Button("출력", variant="secondary")
                                result_display = gr.Markdown()
                                # Search (the lambda's default argument binds
                                # the current company to this button)
                                search_btn.click(
                                    fn=lambda c=company: search_company(c),
                                    outputs=result_display
                                )
                                # Load
                                load_btn.click(
                                    fn=lambda c=company: load_company(c),
                                    outputs=result_display
                                )
                        # Right column
                        if i + 1 < len(KOREAN_COMPANIES):
                            with gr.Column():
                                company = KOREAN_COMPANIES[i + 1]
                                with gr.Group():
                                    gr.Markdown(f"### {company}")
                                    with gr.Row():
                                        search_btn = gr.Button("검색", variant="primary")
                                        load_btn = gr.Button("출력", variant="secondary")
                                    result_display = gr.Markdown()
                                    search_btn.click(
                                        fn=lambda c=company: search_company(c),
                                        outputs=result_display
                                    )
                                    load_btn.click(
                                        fn=lambda c=company: load_company(c),
                                        outputs=result_display
                                    )

iface.launch(
    server_name="0.0.0.0",
    server_port=7860,
    share=True,
    ssl_verify=False,
    show_error=True
)