openfree committed on
Commit 6ba1af9 · verified · 1 Parent(s): 61cc8e8

Update app.py

Files changed (1):
  1. app.py (+157 -154)
app.py CHANGED
@@ -14,7 +14,7 @@ import pathlib
 import sqlite3
 import pytz
 
-# List of target companies/keywords
+# List of target companies/keywords (search terms are kept in English)
 KOREAN_COMPANIES = [
     "NVIDIA",
     "ALPHABET",
@@ -36,7 +36,7 @@ KOREAN_COMPANIES = [
 
 def convert_to_seoul_time(timestamp_str):
     """
-    Convert a given timestamp string (UTC) to Seoul time (KST).
+    Convert the given UTC timestamp string to Seoul time (KST).
     """
     try:
         dt = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
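One note on the unchanged body of `convert_to_seoul_time` shown in these hunks: `seoul_tz.localize(dt)` stamps the parsed naive datetime as Seoul wall time without shifting it from UTC, so the output label changes but the clock does not move. A minimal sketch of a conversion that actually shifts the time, assuming the input really is UTC (`seoul_tz` is redefined here because its assignment is elided from the hunk):

```python
from datetime import datetime
import pytz

def convert_utc_to_seoul(timestamp_str: str) -> str:
    # Parse the naive timestamp, mark it as UTC, then shift it to Asia/Seoul.
    dt = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
    seoul_tz = pytz.timezone('Asia/Seoul')
    seoul_time = pytz.utc.localize(dt).astimezone(seoul_tz)
    return seoul_time.strftime('%Y-%m-%d %H:%M:%S KST')

print(convert_utc_to_seoul('2024-01-01 00:00:00'))  # -> '2024-01-01 09:00:00 KST'
```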
@@ -44,36 +44,36 @@ def convert_to_seoul_time(timestamp_str):
         seoul_time = seoul_tz.localize(dt)
         return seoul_time.strftime('%Y-%m-%d %H:%M:%S KST')
     except Exception as e:
-        print(f"Time conversion error: {str(e)}")
+        print(f"Time conversion error: {str(e)}")
         return timestamp_str
 
 def analyze_sentiment_batch(articles, client):
     """
-    Perform a comprehensive sentiment analysis of the news articles using the OpenAI API.
+    Perform an overall sentiment analysis of the news articles using the OpenAI API.
+    The analysis result is returned in Korean.
     """
     try:
-        # Combine all articles into a single text
+        # Combine all articles into a single text
         combined_text = "\n\n".join([
-            f"Title: {article.get('title', '')}\nContent: {article.get('snippet', '')}"
+            f"Title: {article.get('title', '')}\nContent: {article.get('snippet', '')}"
             for article in articles
         ])
 
-        prompt = f"""Please perform an overall sentiment analysis of the following collection of news articles:
+        prompt = f"""Please perform an overall sentiment analysis of the following collection of news articles:
 
-News content:
+News content:
 {combined_text}
 
-Please follow this format:
-1. Overall Sentiment: [Positive/Negative/Neutral]
-2. Key Positive Factors:
-   - [Item1]
-   - [Item2]
-3. Key Negative Factors:
-   - [Item1]
-   - [Item2]
-4. Summary: [Detailed explanation]
+Please follow this format:
+1. Overall Sentiment: [Positive/Negative/Neutral]
+2. Key Positive Factors:
+   - [Item1]
+   - [Item2]
+3. Key Negative Factors:
+   - [Item1]
+   - [Item2]
+4. Summary: [Detailed explanation]
 """
-
         response = client.chat.completions.create(
             model="CohereForAI/c4ai-command-r-plus-08-2024",
             messages=[{"role": "user", "content": prompt}],
@@ -83,13 +83,13 @@ Please follow this format:
 
         return response.choices[0].message.content
     except Exception as e:
-        return f"Sentiment analysis failed: {str(e)}"
+        return f"Sentiment analysis failed: {str(e)}"
 
 
-# Initialize the database
+# Initialize the database
 def init_db():
     """
-    Initialize the SQLite database (search_results.db) if it doesn't already exist.
+    Initialize the SQLite database (search_results.db), creating it if it does not exist.
     """
     db_path = pathlib.Path("search_results.db")
     conn = sqlite3.connect(db_path)
@@ -105,7 +105,7 @@ def init_db():
 
 def save_to_db(keyword, country, results):
     """
-    Save the search results for a specific (keyword, country) combination into the database.
+    Save the search results for a specific (keyword, country) combination into the database.
     """
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
@@ -122,8 +122,8 @@ def save_to_db(keyword, country, results):
 
 def load_from_db(keyword, country):
     """
-    Load the most recent search results for a specific (keyword, country) combination from the database.
-    Returns the data and the timestamp.
+    Load the most recent search results for a specific (keyword, country) combination from the database.
+    Returns the data with the timestamp converted.
     """
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
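Both helpers round-trip the results dict through SQLite as a JSON string. The diff elides the schema that `init_db` creates, so the table and column names in this minimal sketch of the round-trip are assumptions:

```python
import json
import sqlite3

# Sketch of the JSON round-trip implied by save_to_db/load_from_db.
# Table and column names are assumptions; the real schema is created
# in the elided body of init_db().
conn = sqlite3.connect("search_results.db")
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS search_results
             (id INTEGER PRIMARY KEY AUTOINCREMENT,
              keyword TEXT, country TEXT, results TEXT,
              timestamp DATETIME DEFAULT CURRENT_TIMESTAMP)""")

# save_to_db-style write: the results dict is stored as a JSON string
c.execute("INSERT INTO search_results (keyword, country, results) VALUES (?, ?, ?)",
          ("NVIDIA", "United States", json.dumps({"articles": [], "analysis": ""})))
conn.commit()

# load_from_db-style read: the newest row for the (keyword, country) pair
c.execute("""SELECT results, timestamp FROM search_results
             WHERE keyword = ? AND country = ?
             ORDER BY id DESC LIMIT 1""",
          ("NVIDIA", "United States"))
row = c.fetchone()
data, ts = (json.loads(row[0]), row[1]) if row else (None, None)
conn.close()
```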
@@ -139,84 +139,83 @@ def load_from_db(keyword, country):
 
 def display_results(articles):
     """
-    Convert a list of news articles into a Markdown string for display.
+    Convert a list of news articles into a Markdown string and return it.
     """
     output = ""
     for idx, article in enumerate(articles, 1):
         output += f"### {idx}. {article['title']}\n"
-        output += f"Source: {article['channel']}\n"
-        output += f"Time: {article['time']}\n"
-        output += f"Link: {article['link']}\n"
-        output += f"Summary: {article['snippet']}\n\n"
+        output += f"Source: {article['channel']}\n"
+        output += f"Time: {article['time']}\n"
+        output += f"Link: {article['link']}\n"
+        output += f"Summary: {article['snippet']}\n\n"
     return output
 
 
 ########################################
-# 1) Search => Articles + Analysis, then save to DB
+# 1) Search => articles + sentiment analysis, then save to DB
 ########################################
 def search_company(company):
     """
-    For a single company (or keyword), search US news.
-    1) Retrieve a list of articles
-    2) Perform sentiment analysis
-    3) Save results to DB
-    4) Return (articles + analysis) in a single output.
+    Run a US news search for a single company (or keyword):
+    1) Retrieve the list of articles
+    2) Perform sentiment analysis
+    3) Save the results to the DB
+    4) Return the articles and analysis as a single output
     """
     error_message, articles = serphouse_search(company, "United States")
     if not error_message and articles:
-        # Perform sentiment analysis
+        # Perform sentiment analysis (result returned in Korean)
         analysis = analyze_sentiment_batch(articles, client)
 
-        # Prepare data to save in DB
+        # Prepare the data to save in the DB
         store_dict = {
             "articles": articles,
             "analysis": analysis
         }
         save_to_db(company, "United States", store_dict)
 
-        # Prepare output for display
+        # Prepare the data for display
         output = display_results(articles)
-        output += f"\n\n### Analysis Report\n{analysis}\n"
+        output += f"\n\n### Sentiment Analysis Report\n{analysis}\n"
         return output
-    return f"No search results found for {company}."
+    return f"No search results found for {company}."
 
 ########################################
-# 2) Load => Return articles + analysis from DB
+# 2) Load => return articles + analysis from the DB
 ########################################
 def load_company(company):
     """
-    Load the most recent US news search results for the given company (or keyword) from the database,
-    and return the articles + analysis in a single output.
+    Load the most recent US news search results (articles + sentiment analysis) for the given company (or keyword) from the database.
     """
     data, timestamp = load_from_db(company, "United States")
     if data:
         articles = data.get("articles", [])
         analysis = data.get("analysis", "")
 
-        output = f"### {company} Search Results\nLast Updated: {timestamp}\n\n"
+        output = f"### {company} Search Results\nLast updated: {timestamp}\n\n"
         output += display_results(articles)
-        output += f"\n\n### Analysis Report\n{analysis}\n"
+        output += f"\n\n### Sentiment Analysis Report\n{analysis}\n"
         return output
-    return f"No saved results for {company}."
+    return f"No saved results for {company}."
 
 
 ########################################
-# 3) Updated show_stats() with new title
+# 3) show_stats() - report title changed
 ########################################
 def show_stats():
     """
-    For each company in KOREAN_COMPANIES:
-    - Retrieve the most recent timestamp in DB
-    - Number of articles
-    - Sentiment analysis result
-    Return these in a report format.
-
-    Title changed to: "EarnBOT Analysis Report"
+    For each company in KOREAN_COMPANIES:
+    - the most recent timestamp stored in the DB
+    - the number of stored articles
+    - the sentiment analysis result
+    and return these in report form.
+
+    Title: "EarnBOT Analysis Report"
     """
     conn = sqlite3.connect("search_results.db")
     c = conn.cursor()
 
-    output = "## EarnBOT Analysis Report\n\n"
+    output = "## EarnBOT Analysis Report\n\n"
 
     data_list = []
     for company in KOREAN_COMPANIES:
@@ -253,10 +252,10 @@ def show_stats():
     for comp, tstamp, count, analysis in results_list:
         seoul_time = convert_to_seoul_time(tstamp)
         output += f"### {comp}\n"
-        output += f"- Last updated: {seoul_time}\n"
-        output += f"- Number of articles stored: {count}\n\n"
+        output += f"- Last updated: {seoul_time}\n"
+        output += f"- Number of stored articles: {count}\n\n"
         if analysis:
-            output += "#### News Sentiment Analysis\n"
+            output += "#### News Sentiment Analysis\n"
             output += f"{analysis}\n\n"
         output += "---\n\n"
 
@@ -265,10 +264,10 @@ def show_stats():
 
 def search_all_companies():
     """
-    Search all companies in KOREAN_COMPANIES (in parallel),
-    perform sentiment analysis + save to DB => return Markdown of all results.
+    Search every entry in KOREAN_COMPANIES (in parallel),
+    perform sentiment analysis and save to the DB => return all results as Markdown.
     """
-    overall_result = "# [Search Results for All Companies]\n\n"
+    overall_result = "# [Search Results for All Companies]\n\n"
 
     def do_search(comp):
         return comp, search_company(comp)
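Only the `do_search` helper is visible in this hunk; the executor that fans it out over the company list is elided. A sketch of the parallel loop under that assumption (the worker count is illustrative, not taken from the commit):

```python
from concurrent.futures import ThreadPoolExecutor

# Sketch of the parallel fan-out implied by do_search; the executor details
# are elided from the diff, so worker count and ordering are assumptions.
def search_all_companies_sketch():
    overall_result = "# [Search Results for All Companies]\n\n"

    def do_search(comp):
        return comp, search_company(comp)

    with ThreadPoolExecutor(max_workers=5) as executor:
        for comp, res in executor.map(do_search, KOREAN_COMPANIES):
            overall_result += f"## {comp}\n{res}\n\n"
    return overall_result
```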
@@ -284,9 +283,9 @@ def search_all_companies():
 
 def load_all_companies():
     """
-    Load articles + analysis for all companies in KOREAN_COMPANIES from the DB => return Markdown.
+    Load the stored articles and analysis for every company in KOREAN_COMPANIES from the DB and return Markdown.
     """
-    overall_result = "# [All Companies Data Output]\n\n"
+    overall_result = "# [All Companies Data Output]\n\n"
 
     for comp in KOREAN_COMPANIES:
         overall_result += f"## {comp}\n"
@@ -296,81 +295,81 @@ def load_all_companies():
 
 def full_summary_report():
     """
-    1) Search all companies (in parallel) -> 2) Load results -> 3) Show sentiment analysis stats
-    Return a combined report with all three steps.
+    1) Search all companies (in parallel) -> 2) Load the results from the DB -> 3) Show sentiment analysis stats
+    Return the results of the three steps as one combined report.
     """
-    # 1) Search all companies => store to DB
+    # 1) Search all companies and store the results in the DB
     search_result_text = search_all_companies()
 
-    # 2) Load all results => from DB
+    # 2) Load the results from the DB
     load_result_text = load_all_companies()
 
-    # 3) Show stats => EarnBOT Analysis Report
+    # 3) Show stats - the EarnBOT Analysis Report
     stats_text = show_stats()
 
     combined_report = (
-        "# Full Analysis Summary Report\n\n"
-        "Executed in the following order:\n"
-        "1. Search all companies (parallel) + sentiment analysis => 2. Load results from DB => 3. Show overall sentiment analysis stats\n\n"
+        "# Full Analysis Summary Report\n\n"
+        "Execution order:\n"
+        "1. Search all companies (in parallel) + sentiment analysis -> 2. Load results from the DB -> 3. Show overall sentiment analysis stats\n\n"
         f"{search_result_text}\n\n"
         f"{load_result_text}\n\n"
-        "## [Overall Sentiment Analysis Stats]\n\n"
+        "## [Overall Sentiment Analysis Stats]\n\n"
         f"{stats_text}"
     )
     return combined_report
 
 
 ########################################
-# Additional feature: User custom search
+# Additional feature: custom user search
 ########################################
 def search_custom(query, country):
     """
-    For a user-provided (query, country):
-    1) Search + sentiment analysis => save to DB
-    2) Load from DB => display articles + analysis
+    For a user-supplied (keyword, country):
+    1) Search + sentiment analysis, then save to the DB
+    2) Load from the DB and display the articles and analysis
     """
     error_message, articles = serphouse_search(query, country)
     if error_message:
-        return f"An error occurred: {error_message}"
+        return f"An error occurred: {error_message}"
     if not articles:
-        return "No results were found for your query."
+        return "No results were found for your query."
 
-    # 1) Perform analysis
+    # 1) Perform sentiment analysis (in Korean)
     analysis = analyze_sentiment_batch(articles, client)
 
-    # 2) Save to DB
+    # 2) Save to the DB
     save_data = {
         "articles": articles,
         "analysis": analysis
     }
     save_to_db(query, country, save_data)
 
-    # 3) Reload from DB
+    # 3) Reload from the DB
    loaded_data, timestamp = load_from_db(query, country)
    if not loaded_data:
-        return "Failed to load data from DB."
+        return "Failed to load data from the DB."
 
-    # 4) Prepare final output
-    out = f"## [Custom Search Results]\n\n"
-    out += f"**Keyword**: {query}\n\n"
-    out += f"**Country**: {country}\n\n"
-    out += f"**Timestamp**: {timestamp}\n\n"
+    # 4) Prepare the final output
+    out = f"## [Custom Search Results]\n\n"
+    out += f"**Keyword**: {query}\n\n"
+    out += f"**Country**: {country}\n\n"
+    out += f"**Timestamp**: {timestamp}\n\n"
 
     arts = loaded_data.get("articles", [])
     analy = loaded_data.get("analysis", "")
 
     out += display_results(arts)
-    out += f"### News Sentiment Analysis\n{analy}\n"
+    out += f"### News Sentiment Analysis\n{analy}\n"
 
     return out
 
 
 ########################################
-# API Authentication
+# API authentication settings
 ########################################
 ACCESS_TOKEN = os.getenv("HF_TOKEN")
 if not ACCESS_TOKEN:
-    raise ValueError("HF_TOKEN environment variable is not set")
+    raise ValueError("The HF_TOKEN environment variable is not set.")
 
 client = OpenAI(
     base_url="https://api-inference.huggingface.co/v1/",
@@ -381,7 +380,7 @@ API_KEY = os.getenv("SERPHOUSE_API_KEY")
 
 
 ########################################
-# Country-specific settings
+# Country-specific settings
 ########################################
 COUNTRY_LANGUAGES = {
     "United States": "en",
@@ -439,8 +438,8 @@ COUNTRY_LANGUAGES = {
     "Nigeria": "en",
     "Kenya": "sw",
     "Ukraine": "uk",
-    "Croatia": "hr",
-    "Slovakia": "sk",
+    "Croatia": "hr",
+    "Slovakia": "sk",
     "Bulgaria": "bg",
     "Serbia": "sr",
     "Estonia": "et",
@@ -527,8 +526,8 @@ COUNTRY_LOCATIONS = {
 @lru_cache(maxsize=100)
 def translate_query(query, country):
     """
-    Use the unofficial Google Translation API to translate the query into the target country's language.
-    If the query is already in English, or if translation fails, return the original query.
+    Translate the query into the target country's language using the unofficial Google Translation API.
+    If translation fails, or the query is in English, return the original query.
     """
     try:
         if is_english(query):
@@ -558,20 +557,20 @@ def translate_query(query, country):
         return query
 
     except Exception as e:
-        print(f"Translation error: {str(e)}")
+        print(f"Translation error: {str(e)}")
         return query
 
 def is_english(text):
     """
-    Check if a string is (mostly) English by verifying character code ranges.
+    Check whether a string is mostly English by checking character code ranges.
     """
     return all(ord(char) < 128 for char in text.replace(' ', '').replace('-', '').replace('_', ''))
 
 def search_serphouse(query, country, page=1, num_result=10):
     """
-    Send a real-time search request to the SerpHouse API,
-    specifying the 'news' tab (sort_by=date) for the given query.
-    Returns a dict with 'results' or 'error'.
+    Send a real-time search request to the SerpHouse API,
+    searching the 'news' tab (sorted by date).
+    The return value is a dict containing 'results' or 'error'.
     """
     url = "https://api.serphouse.com/serp/live"
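The body of `translate_query` is elided apart from the English short-circuit and the error handler. Calls against the unofficial endpoint typically look like the sketch below; the URL and parameters are assumptions, not taken from this commit:

```python
import requests

def translate_query_sketch(query: str, target_lang: str) -> str:
    """Sketch of an unofficial Google Translate call; the endpoint and
    parameters are assumptions -- the real implementation is elided."""
    url = "https://translate.googleapis.com/translate_a/single"
    params = {"client": "gtx", "sl": "auto", "tl": target_lang, "dt": "t", "q": query}
    try:
        resp = requests.get(url, params=params, timeout=5)
        resp.raise_for_status()
        # The response is a nested list; element [0] holds the translated chunks.
        return "".join(chunk[0] for chunk in resp.json()[0] if chunk[0])
    except Exception:
        return query  # fall back to the original query, as the docstring describes
```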
 
@@ -628,26 +627,26 @@ def search_serphouse(query, country, page=1, num_result=10):
 
     except requests.exceptions.Timeout:
         return {
-            "error": "Search timed out. Please try again later.",
+            "error": "Search timed out. Please try again later.",
             "translated_query": query
         }
     except requests.exceptions.RequestException as e:
         return {
-            "error": f"Error during search: {str(e)}",
+            "error": f"An error occurred during the search: {str(e)}",
             "translated_query": query
         }
     except Exception as e:
         return {
-            "error": f"Unexpected error occurred: {str(e)}",
+            "error": f"An unexpected error occurred: {str(e)}",
             "translated_query": query
         }
 
 def format_results_from_raw(response_data):
     """
-    Process the SerpHouse API response data and return (error_message, article_list).
+    Process the SerpHouse API response data and return (error message, article list).
     """
     if "error" in response_data:
-        return "Error: " + response_data["error"], []
+        return "Error: " + response_data["error"], []
 
     try:
         results = response_data["results"]
@@ -655,9 +654,9 @@ def format_results_from_raw(response_data):
 
         news_results = results.get('results', {}).get('results', {}).get('news', [])
         if not news_results:
-            return "No search results found.", []
+            return "No search results found.", []
 
-        # Filter out Korean domains and Korean keywords (example filtering)
+        # Filter out Korean domains and keywords (example filtering)
         korean_domains = [
             '.kr', 'korea', 'korean', 'yonhap', 'hankyung', 'chosun',
             'donga', 'joins', 'hani', 'koreatimes', 'koreaherald'
@@ -678,27 +677,27 @@ def format_results_from_raw(response_data):
             any(keyword in title for keyword in korean_keywords)
         )
 
-        # Exclude Korean content
+        # Exclude Korea-related content
         if not is_korean_content:
             filtered_articles.append({
                 "index": idx,
-                "title": result.get("title", "No Title"),
+                "title": result.get("title", "No Title"),
                 "link": url,
-                "snippet": result.get("snippet", "No Content"),
-                "channel": result.get("channel", result.get("source", "Unknown")),
-                "time": result.get("time", result.get("date", "Unknown Time")),
+                "snippet": result.get("snippet", "No Content"),
+                "channel": result.get("channel", result.get("source", "Unknown")),
+                "time": result.get("time", result.get("date", "No Time Info")),
                 "image_url": result.get("img", result.get("thumbnail", "")),
                 "translated_query": translated_query
             })
 
         return "", filtered_articles
     except Exception as e:
-        return f"Error processing results: {str(e)}", []
+        return f"An error occurred while processing the results: {str(e)}", []
 
 def serphouse_search(query, country):
     """
-    Helper function to search and then format results.
-    Returns (error_message, article_list).
+    Helper function that performs the search and formats the results.
+    Returns (error message, article list).
     """
     response_data = search_serphouse(query, country)
     return format_results_from_raw(response_data)
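`format_results_from_raw` reaches the news list through a triply nested `results` key. The response shape that parsing implies is sketched below; only the keys actually read in the hunks above are certain, while the sample values and the `url` field are illustrative assumptions:

```python
# Response shape implied by the parsing in format_results_from_raw.
response_data = {
    "results": {                 # unwrapped first via response_data["results"]
        "results": {
            "results": {
                "news": [
                    {
                        "title": "Example headline",
                        "url": "https://example.com/article",    # assumed source of `url`
                        "snippet": "One-sentence summary...",
                        "channel": "Example News",               # falls back to "source"
                        "time": "2 hours ago",                   # falls back to "date"
                        "img": "https://example.com/thumb.jpg",  # falls back to "thumbnail"
                    }
                ]
            }
        }
    }
}

results = response_data["results"]
news = results.get("results", {}).get("results", {}).get("news", [])
assert len(news) == 1
```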
@@ -907,9 +906,9 @@ footer {
 }
 """
 
-# --- Gradio Interface (UI portion only) ---
-with gr.Blocks(css=css, title="NewsAI Service") as iface:
-    # Initialize the database first (keeping the call to init_db(), unchanged)
+# --- Gradio interface (UI portion) ---
+with gr.Blocks(css=css, title="NewsAI Service") as iface:
+    # Initialize the database (init_db() call)
     init_db()
 
     gr.HTML("""<a href="https://visitorbadge.io/status?path=https%3A%2F%2Fopenfree-MoneyRadar.hf.space">
@@ -919,50 +918,54 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
 
     with gr.Tabs():
         with gr.Tab("MoneyRadar"):
-            # Added usage instructions and feature explanations here:
+            # Usage instructions and feature descriptions (in Korean)
             gr.Markdown(
                 """
-## MoneyRadar: Implies scanning(Automatic extraction of top 100 priority news within the last 24 hours) the market to spot money-making opportunities.
-
-**How to Use This Service**:
-1. **Custom Search**: Enter any keyword and choose a target country to fetch the latest news. The system automatically performs sentiment analysis and stores results in the database.
-2. **Generate Full Analysis Summary Report**: This will automatically:
-   - Search all predefined companies (in parallel),
-   - Store the articles and sentiment analysis,
-   - Display a combined overall report.
-3. **Individual Companies**:
-   - **Search**: Fetch and analyze the latest news from Google (for the chosen company).
-   - **Load from DB**: Retrieve the most recent saved news and sentiment analysis from the local database.
-
-**Features**:
-- **Real-time News Scraping**: Retrieves fresh articles from multiple regions.
-- **Advanced Sentiment Analysis**: Uses state-of-the-art NLP models via the API.
-- **Data Persistence**: Automatically saves and retrieves search results in a local SQLite database for quick reference.
-- **Flexible**: Ability to search any keyword/country or select from predefined Big Tech & finance-related terms.
-0. **Community: https://discord.gg/openfreeai
----
+## MoneyRadar
+Automatically extracts the top 100 priority news items from the last 24 hours
+to spot money-making opportunities.
+
+**How to use this service**:
+1. **Custom Search**: Enter a keyword and select a target country to fetch the latest news.
+   The system automatically performs sentiment analysis and stores the results in the database.
+2. **Generate Full Analysis Summary Report**: Automatically performs the following:
+   - Searches all predefined companies in parallel
+   - Stores the articles and sentiment analysis results in the database
+   - Displays all results as a combined report
+3. **Individual company search/load**:
+   - **Search**: Searches and analyzes the latest news from Google for the selected company
+   - **Load from DB**: Loads the most recently saved news and sentiment analysis results from the local database.
+
+**Key features**:
+- **Real-time news scraping**: Retrieves the latest articles from multiple regions.
+- **Advanced sentiment analysis**: Performs sentiment analysis using state-of-the-art NLP models.
+- **Data persistence**: Automatically saves and loads search results in an SQLite database.
+- **Flexibility**: Any keyword/country can be searched, not just the predefined keywords.
+
+**Community**: https://discord.gg/openfreeai
+---
                 """
             )
 
-            # User custom search section
+            # Custom search section
             with gr.Group():
-                gr.Markdown("### Custom Search")
+                gr.Markdown("### Custom Search")
                 with gr.Row():
                     with gr.Column():
                         user_input = gr.Textbox(
-                            label="Enter your keyword",
-                            placeholder="e.g., Apple, Samsung, etc.",
+                            label="Enter your keyword",
+                            placeholder="e.g., Apple, Samsung, etc.",
                             elem_classes="textbox"
                         )
                     with gr.Column():
                        country_selection = gr.Dropdown(
                            choices=list(COUNTRY_LOCATIONS.keys()),
                            value="United States",
-                            label="Select Country"
+                            label="Select Country"
                        )
                    with gr.Column():
                        custom_search_btn = gr.Button(
-                            "Search",
+                            "Search",
                            variant="primary",
                            elem_classes="primary-btn"
                        )
@@ -975,10 +978,10 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
                 outputs=custom_search_output
             )
 
-            # Button to generate a full report
+            # Button for generating the full report
             with gr.Row():
                 full_report_btn = gr.Button(
-                    "Generate Full Analysis Summary Report",
+                    "Generate Full Analysis Summary Report",
                     variant="primary",
                     elem_classes="primary-btn"
                 )
@@ -989,23 +992,23 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
                 outputs=full_report_display
             )
 
-            # Individual search/load for companies in KOREAN_COMPANIES
+            # Search/load for individual companies in KOREAN_COMPANIES
             with gr.Column():
                 for i in range(0, len(KOREAN_COMPANIES), 2):
                     with gr.Row():
-                        # Left column
+                        # Left column
                         with gr.Column():
                             company = KOREAN_COMPANIES[i]
                             with gr.Group():
                                 gr.Markdown(f"### {company}")
                                 with gr.Row():
                                     search_btn = gr.Button(
-                                        "Search",
+                                        "Search",
                                         variant="primary",
                                         elem_classes="primary-btn"
                                     )
                                     load_btn = gr.Button(
-                                        "Load from DB",
+                                        "Load from DB",
                                         variant="secondary",
                                         elem_classes="secondary-btn"
                                     )
@@ -1020,7 +1023,7 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
                                 outputs=result_display
                             )
 
-                        # Right column (if exists)
+                        # Right column (if it exists)
                         if i + 1 < len(KOREAN_COMPANIES):
                             with gr.Column():
                                 company = KOREAN_COMPANIES[i + 1]
@@ -1028,12 +1031,12 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
                                 gr.Markdown(f"### {company}")
                                 with gr.Row():
                                     search_btn = gr.Button(
-                                        "Search",
+                                        "Search",
                                         variant="primary",
                                         elem_classes="primary-btn"
                                     )
                                     load_btn = gr.Button(
-                                        "Load from DB",
+                                        "Load from DB",
                                         variant="secondary",
                                         elem_classes="secondary-btn"
                                     )
@@ -1048,7 +1051,7 @@ with gr.Blocks(css=css, title="NewsAI Service") as iface:
                                 outputs=result_display
                             )
 
-# Launch the Gradio interface
+# Launch the Gradio interface
 iface.launch(
     server_name="0.0.0.0",
     server_port=7860,
 