ginipick committed on
Commit 3f93809 · verified · 1 Parent(s): a899d7b

Update app.py

Files changed (1)
  1. app.py +10 -59
app.py CHANGED
@@ -36,21 +36,14 @@ KOREAN_COMPANIES = [
 
 def convert_to_seoul_time(timestamp_str):
     try:
-        # ์ž…๋ ฅ๋œ ์‹œ๊ฐ„์„ naive datetime ๊ฐ์ฒด๋กœ ๋ณ€ํ™˜
         dt = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
-
-        # ์„œ์šธ ์‹œ๊ฐ„๋Œ€ ์„ค์ •
         seoul_tz = pytz.timezone('Asia/Seoul')
-
-        # ํ˜„์žฌ ์‹œ๊ฐ„์„ ์„œ์šธ ์‹œ๊ฐ„์œผ๋กœ ์ธ์‹ํ•˜๋„๋ก ์ˆ˜์ •
         seoul_time = seoul_tz.localize(dt)
-
         return seoul_time.strftime('%Y-%m-%d %H:%M:%S KST')
     except Exception as e:
         print(f"์‹œ๊ฐ„ ๋ณ€ํ™˜ ์˜ค๋ฅ˜: {str(e)}")
         return timestamp_str
 
-
 def analyze_sentiment_batch(articles, client):
     try:
         # ๋ชจ๋“  ๊ธฐ์‚ฌ์˜ ์ œ๋ชฉ๊ณผ ๋‚ด์šฉ์„ ํ•˜๋‚˜์˜ ํ…์ŠคํŠธ๋กœ ๊ฒฐํ•ฉ
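The hunk above only strips comments around the existing KST handling: a naive timestamp string is parsed and then tagged as Asia/Seoul via `pytz.localize()`, without shifting the clock. A minimal standalone sketch of that pattern (the helper name `to_kst_label` is illustrative, not from app.py):

```python
from datetime import datetime
import pytz

def to_kst_label(timestamp_str: str) -> str:
    # Parse the stored string into a naive datetime, then attach the
    # Asia/Seoul zone with localize() instead of converting the time.
    dt = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
    seoul_tz = pytz.timezone('Asia/Seoul')
    return seoul_tz.localize(dt).strftime('%Y-%m-%d %H:%M:%S KST')

print(to_kst_label('2024-01-01 09:30:00'))  # -> 2024-01-01 09:30:00 KST
```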
@@ -58,8 +51,9 @@ def analyze_sentiment_batch(articles, client):
             f"์ œ๋ชฉ: {article.get('title', '')}\n๋‚ด์šฉ: {article.get('snippet', '')}"
             for article in articles
         ])
-
-        prompt = f\"""๋‹ค์Œ ๋‰ด์Šค ๋ชจ์Œ์— ๋Œ€ํ•ด ์ „๋ฐ˜์ ์ธ ๊ฐ์„ฑ ๋ถ„์„์„ ์ˆ˜ํ–‰ํ•˜์„ธ์š”:
+
+        # f""" ... """ ํ˜•ํƒœ๋กœ ์—ฌ๋Ÿฌ ์ค„ ๋ฌธ์ž์—ด์„ ์ •ํ™•ํžˆ ์‚ฌ์šฉ
+        prompt = f"""๋‹ค์Œ ๋‰ด์Šค ๋ชจ์Œ์— ๋Œ€ํ•ด ์ „๋ฐ˜์ ์ธ ๊ฐ์„ฑ ๋ถ„์„์„ ์ˆ˜ํ–‰ํ•˜์„ธ์š”:
 
 ๋‰ด์Šค ๋‚ด์šฉ:
 {combined_text}
@@ -74,7 +68,7 @@ def analyze_sentiment_batch(articles, client):
 - [ํ•ญ๋ชฉ2]
 4. ์ข…ํ•ฉ ํ‰๊ฐ€: [์ƒ์„ธ ์„ค๋ช…]
 """
-
+
         response = client.chat.completions.create(
             model="CohereForAI/c4ai-command-r-plus-08-2024",
             messages=[{"role": "user", "content": prompt}],
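The first hunk above carries the substantive fix in this commit: the old `prompt = f\"""` had a stray backslash that broke the multi-line f-string, and the new line uses a plain `f"""..."""` literal. A minimal sketch of the pattern with placeholder articles (not the app's full prompt; the join separator is assumed):

```python
articles = [{"title": "Sample headline", "snippet": "Sample summary"}]

combined_text = "\n\n".join([
    f"์ œ๋ชฉ: {a.get('title', '')}\n๋‚ด์šฉ: {a.get('snippet', '')}"
    for a in articles
])

# A triple-quoted f-string keeps the multi-line prompt readable and
# interpolates the combined article text in place.
prompt = f"""๋‹ค์Œ ๋‰ด์Šค ๋ชจ์Œ์— ๋Œ€ํ•ด ์ „๋ฐ˜์ ์ธ ๊ฐ์„ฑ ๋ถ„์„์„ ์ˆ˜ํ–‰ํ•˜์„ธ์š”:

๋‰ด์Šค ๋‚ด์šฉ:
{combined_text}
"""
print(prompt)
```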
@@ -101,27 +95,20 @@ def init_db():
     conn.commit()
     conn.close()
 
-
 def save_to_db(keyword, country, results):
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
-
-    # ํ˜„์žฌ ์‹œ๊ฐ„์„ ์„œ์šธ ์‹œ๊ฐ„์œผ๋กœ ๊ฐ€์ ธ์˜ค๊ธฐ
     seoul_tz = pytz.timezone('Asia/Seoul')
     now = datetime.now(seoul_tz)
-
-    # ์‹œ๊ฐ„๋Œ€ ์ •๋ณด๋ฅผ ์ œ๊ฑฐํ•˜๊ณ  ์ €์žฅ
     timestamp = now.strftime('%Y-%m-%d %H:%M:%S')
 
     c.execute("""INSERT INTO searches
                  (keyword, country, results, timestamp)
                  VALUES (?, ?, ?, ?)""",
              (keyword, country, json.dumps(results), timestamp))
-
     conn.commit()
     conn.close()
 
-
 def load_from_db(keyword, country):
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
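For context, save_to_db() and load_from_db() in the hunk above persist each search as a JSON blob keyed by keyword and country. A self-contained sketch of that round trip (the CREATE TABLE layout is inferred from the INSERT shown above; the column types, the in-memory database, and the ORDER BY are assumptions for the example):

```python
import json
import sqlite3

conn = sqlite3.connect(":memory:")
c = conn.cursor()
c.execute("""CREATE TABLE searches
             (keyword TEXT, country TEXT, results TEXT, timestamp TEXT)""")

# Save: serialize the article list to JSON, as save_to_db() does.
articles = [{"title": "Example", "snippet": "..."}]
c.execute("""INSERT INTO searches (keyword, country, results, timestamp)
             VALUES (?, ?, ?, ?)""",
          ("example-company", "United States", json.dumps(articles), "2024-01-01 09:00:00"))
conn.commit()

# Load: newest row for the keyword/country pair, then json.loads() it back.
c.execute("""SELECT results, timestamp FROM searches
             WHERE keyword=? AND country=? ORDER BY timestamp DESC LIMIT 1""",
          ("example-company", "United States"))
row = c.fetchone()
print(json.loads(row[0]), row[1])
conn.close()
```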
@@ -133,7 +120,6 @@ def load_from_db(keyword, country):
         return json.loads(result[0]), convert_to_seoul_time(result[1])
     return None, None
 
-
 def display_results(articles):
     output = ""
     for idx, article in enumerate(articles, 1):
@@ -144,7 +130,6 @@ def display_results(articles):
         output += f"์š”์•ฝ: {article['snippet']}\n\n"
     return output
 
-
 def search_company(company):
     error_message, articles = serphouse_search(company, "United States")
     if not error_message and articles:
@@ -152,14 +137,12 @@ def search_company(company):
         return display_results(articles)
     return f"{company}์— ๋Œ€ํ•œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค."
 
-
 def load_company(company):
     results, timestamp = load_from_db(company, "United States")
     if results:
         return f"### {company} ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ\n์ €์žฅ ์‹œ๊ฐ„: {timestamp}\n\n" + display_results(results)
     return f"{company}์— ๋Œ€ํ•œ ์ €์žฅ๋œ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค."
 
-
 def show_stats():
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
@@ -186,7 +169,6 @@ def show_stats():
         output += f"- ์ €์žฅ๋œ ๊ธฐ์‚ฌ ์ˆ˜: {len(articles)}๊ฑด\n\n"
 
         if articles:
-            # ์ „์ฒด ๊ธฐ์‚ฌ์— ๋Œ€ํ•œ ๊ฐ์„ฑ ๋ถ„์„
             sentiment_analysis = analyze_sentiment_batch(articles, client)
             output += "#### ๋‰ด์Šค ๊ฐ์„ฑ ๋ถ„์„\n"
             output += f"{sentiment_analysis}\n\n"
@@ -196,11 +178,7 @@ def show_stats():
     conn.close()
     return output
 
-
 def search_all_companies():
-    """
-    ๋“ฑ๋ก๋œ ๋ชจ๋“  ๊ธฐ์—…์— ๋Œ€ํ•ด ์ˆœ์ฐจ์ ์œผ๋กœ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ ํ›„ ๊ฒฐ๊ณผ๋ฅผ ํ•ฉ์ณ์„œ ๋ฐ˜ํ™˜
-    """
     overall_result = "# [์ „์ฒด ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ]\n\n"
     for comp in KOREAN_COMPANIES:
         overall_result += f"## {comp}\n"
@@ -209,9 +187,6 @@ def search_all_companies():
     return overall_result
 
 def load_all_companies():
-    """
-    ๋“ฑ๋ก๋œ ๋ชจ๋“  ๊ธฐ์—…์— ๋Œ€ํ•ด DB์— ์ €์žฅ๋œ ๊ฒฐ๊ณผ๋ฅผ ์ˆœ์ฐจ์ ์œผ๋กœ ๋ถˆ๋Ÿฌ์™€์„œ ํ•ฉ์ณ์„œ ๋ฐ˜ํ™˜
-    """
     overall_result = "# [์ „์ฒด ์ถœ๋ ฅ ๊ฒฐ๊ณผ]\n\n"
     for comp in KOREAN_COMPANIES:
         overall_result += f"## {comp}\n"
@@ -220,21 +195,13 @@ def load_all_companies():
     return overall_result
 
 def full_summary_report():
-    """
-    '์ „์ฒด ๋ถ„์„ ๋ณด๊ณ  ์š”์•ฝ' ๋ฒ„ํŠผ ํด๋ฆญ ์‹œ,
-    1) ์ „์ฒด ๊ฒ€์ƒ‰ -> 2) ์ „์ฒด ์ถœ๋ ฅ -> 3) ์ „์ฒด ํ†ต๊ณ„(๊ฐ์„ฑ ๋ถ„์„)
-    ์ˆœ์œผ๋กœ ์‹คํ–‰ํ•˜๊ณ  ๊ทธ ๊ฒฐ๊ณผ๋ฅผ ํ•œ ๋ฒˆ์— ๋ฆฌํ„ด
-    """
     # 1) ์ „์ฒด ๊ฒ€์ƒ‰
     search_result_text = search_all_companies()
-
     # 2) ์ „์ฒด ์ถœ๋ ฅ
     load_result_text = load_all_companies()
-
     # 3) ์ „์ฒด ํ†ต๊ณ„
     stats_text = show_stats()
 
-    # ์ตœ์ข… ๋ณด๊ณ ์„œ ํ˜•ํƒœ๋กœ ํ•ฉ์ณ์„œ ๋ฐ˜ํ™˜
     combined_report = (
         "# ์ „์ฒด ๋ถ„์„ ๋ณด๊ณ  ์š”์•ฝ\n\n"
         "์•„๋ž˜ ์ˆœ์„œ๋กœ ์‹คํ–‰๋˜์—ˆ์Šต๋‹ˆ๋‹ค:\n"
@@ -246,7 +213,6 @@ def full_summary_report():
     )
     return combined_report
 
-
 ACCESS_TOKEN = os.getenv("HF_TOKEN")
 if not ACCESS_TOKEN:
     raise ValueError("HF_TOKEN environment variable is not set")
@@ -258,8 +224,6 @@ client = OpenAI(
 
 API_KEY = os.getenv("SERPHOUSE_API_KEY")
 
-
-# ๊ตญ๊ฐ€๋ณ„ ์–ธ์–ด ์ฝ”๋“œ ๋งคํ•‘ (์ฒซ ๋ฒˆ์งธ ํƒญ์—์„œ๋Š” 'United States'๋งŒ ์ฃผ๋กœ ์‚ฌ์šฉ)
 COUNTRY_LANGUAGES = {
     "United States": "en",
     "KOREA": "ko",
@@ -325,10 +289,10 @@ COUNTRY_LANGUAGES = {
     "Latvia": "lv",
     "Lithuania": "lt",
     "Slovenia": "sl",
-    "Luxembourg": "fr",
-    "Malta": "mt",
-    "Cyprus": "el",
-    "Iceland": "is"
+    "Luxembourg": "Luxembourg",
+    "Malta": "Malta",
+    "Cyprus": "Cyprus",
+    "Iceland": "Iceland"
 }
 
 COUNTRY_LOCATIONS = {
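The last hunk above swaps the language codes for Luxembourg, Malta, Cyprus, and Iceland for country-name strings. Elsewhere in app.py, translate_query() reads its target language straight from this dict (`target_lang = COUNTRY_LANGUAGES[country]`), so whatever value sits in the mapping is what the translation step receives. A small sketch of that lookup (the helper `pick_target_lang` and the "en" fallback are illustrative; the real code indexes the dict directly):

```python
COUNTRY_LANGUAGES = {
    "United States": "en",
    "KOREA": "ko",
    "Luxembourg": "Luxembourg",  # value after this commit
}

def pick_target_lang(country: str) -> str:
    # translate_query() resolves the target language from COUNTRY_LANGUAGES;
    # this helper just makes the lookup (plus a fallback) explicit.
    return COUNTRY_LANGUAGES.get(country, "en")

print(pick_target_lang("KOREA"))       # ko
print(pick_target_lang("Luxembourg"))  # Luxembourg
```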
@@ -402,17 +366,15 @@ COUNTRY_LOCATIONS = {
     "Iceland": "Iceland"
 }
 
-
 @lru_cache(maxsize=100)
 def translate_query(query, country):
     try:
         if is_english(query):
             return query
-
+
         if country in COUNTRY_LANGUAGES:
             if country == "South Korea":
                 return query
-
             target_lang = COUNTRY_LANGUAGES[country]
 
             url = "https://translate.googleapis.com/translate_a/single"
@@ -438,11 +400,9 @@ def translate_query(query, country):
         print(f"๋ฒˆ์—ญ ์˜ค๋ฅ˜: {str(e)}")
         return query
 
-
 def is_english(text):
     return all(ord(char) < 128 for char in text.replace(' ', '').replace('-', '').replace('_', ''))
 
-
 def search_serphouse(query, country, page=1, num_result=10):
     url = "https://api.serphouse.com/serp/live"
 
@@ -513,7 +473,6 @@ def search_serphouse(query, country, page=1, num_result=10):
         "translated_query": query
     }
 
-
 def format_results_from_raw(response_data):
     if "error" in response_data:
         return "Error: " + response_data["error"], []
@@ -525,8 +484,7 @@ def format_results_from_raw(response_data):
         news_results = results.get('results', {}).get('results', {}).get('news', [])
         if not news_results:
             return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []
-
-        # ํ•œ๊ตญ ๋„๋ฉ”์ธ ๋ฐ ํ•œ๊ตญ ๊ด€๋ จ ํ‚ค์›Œ๋“œ ํ•„ํ„ฐ๋ง
+
         korean_domains = [
             '.kr', 'korea', 'korean', 'yonhap', 'hankyung', 'chosun',
             'donga', 'joins', 'hani', 'koreatimes', 'koreaherald'
@@ -542,7 +500,6 @@ def format_results_from_raw(response_data):
             title = result.get("title", "").lower()
             channel = result.get("channel", result.get("source", "")).lower()
 
-            # ํ•œ๊ตญ ๊ด€๋ จ ์ปจํ…์ธ  ํ•„ํ„ฐ๋ง
             is_korean_content = (
                 any(domain in url or domain in channel for domain in korean_domains) or
                 any(keyword in title.lower() for keyword in korean_keywords)
@@ -564,12 +521,10 @@ def format_results_from_raw(response_data):
     except Exception as e:
         return f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", []
 
-
 def serphouse_search(query, country):
     response_data = search_serphouse(query, country)
     return format_results_from_raw(response_data)
 
-
 css = """
 /* ์ „์—ญ ์Šคํƒ€์ผ */
 footer {visibility: hidden;}
@@ -745,7 +700,6 @@ footer {visibility: hidden;}
 }
 """
 
-
 with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
     init_db()
 
@@ -755,18 +709,15 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css, title="NewsAI ์„œ๋น„
     gr.Markdown("## ํ•œ๊ตญ ์ฃผ์š” ๊ธฐ์—… ๋ฏธ๊ตญ ๋‰ด์Šค DB")
     gr.Markdown("๊ฐ ๊ธฐ์—…์˜ ๋ฏธ๊ตญ ๋‰ด์Šค๋ฅผ ๊ฒ€์ƒ‰ํ•˜์—ฌ DB์— ์ €์žฅํ•˜๊ณ  ๋ถˆ๋Ÿฌ์˜ฌ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.")
 
-    # (์ˆ˜์ •) ์ƒ๋‹จ์— '์ „์ฒด ๋ถ„์„ ๋ณด๊ณ  ์š”์•ฝ' ๋ฒ„ํŠผ์„ ๋ฐฐ์น˜
     with gr.Row():
         full_report_btn = gr.Button("์ „์ฒด ๋ถ„์„ ๋ณด๊ณ  ์š”์•ฝ", variant="primary")
         full_report_display = gr.Markdown()
 
-    # ๋ฒ„ํŠผ ํด๋ฆญ ์‹œ full_summary_report() ์‹คํ–‰
     full_report_btn.click(
         fn=full_summary_report,
         outputs=full_report_display
     )
 
-    # ์ดํ›„ ๊ฐœ๋ณ„ ๊ธฐ์—… ๊ฒ€์ƒ‰/์ถœ๋ ฅ UI
     with gr.Column():
         for i in range(0, len(KOREAN_COMPANIES), 2):
             with gr.Row():
 