openfree committed on
Commit 6ce4225 · verified · 1 Parent(s): dcd9353

Update app.py

Files changed (1)
  1. app.py +70 -90
app.py CHANGED
@@ -75,123 +75,103 @@ def create_deepseek_interface():
             print(f"Original query: {query}")
             print(extraction_result)

-            # Check whether the query is Korean and pick settings accordingly
-            is_korean = any('\uAC00' <= c <= '\uD7A3' for c in search_query)
-
-            # Follow the MoneyRadar approach exactly
-            from datetime import datetime, timedelta
-            now = datetime.utcnow()
-            yesterday = now - timedelta(days=1)
-            date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
-
-            # Call the SerpHouse API via POST (following the MoneyRadar code exactly)
             url = "https://api.serphouse.com/serp/live"

-            # Country setting
-            country = "KOREA" if is_korean else "United States"
-
-            # COUNTRY_LOCATIONS and COUNTRY_LANGUAGES mappings (as used in MoneyRadar)
-            COUNTRY_LOCATIONS = {"United States": "United States", "KOREA": "kr"}
-            COUNTRY_LANGUAGES = {"United States": "en", "KOREA": "ko"}

-            # Build the payload MoneyRadar-style (request 100 results)
-            payload = {
-                "q": search_query.replace(',', ' ').strip(),
                 "domain": "google.com",
-                "loc": COUNTRY_LOCATIONS.get(country, "United States"),
-                "lang": COUNTRY_LANGUAGES.get(country, "en"),
                 "device": "desktop",
-                "serp_type": "news",
-                "page": "1",
-                "num": "100",
-                "date_range": date_range,
-                "sort_by": "date"
             }

             headers = {
-                "accept": "application/json",
-                "content-type": "application/json",
-                "authorization": f"Bearer {serphouse_api_key}"
             }

-            print(f"Calling SerpHouse API... search query: {search_query}")
-            print(f"Request method: POST, payload: {payload}")
-
-            # Perform the POST request (with a session and retry logic)
-            import requests
-            from requests.adapters import HTTPAdapter
-            from requests.packages.urllib3.util.retry import Retry
-
-            session = requests.Session()
-            retries = Retry(
-                total=5,
-                backoff_factor=1,
-                status_forcelist=[500, 502, 503, 504, 429],
-                allowed_methods=["POST"]
-            )
-            adapter = HTTPAdapter(max_retries=retries)
-            session.mount('http://', adapter)
-            session.mount('https://', adapter)
-
-            response = session.post(
-                url,
-                json=payload,
-                headers=headers,
-                timeout=(30, 30)
-            )

             response.raise_for_status()

             print(f"SerpHouse API response status code: {response.status_code}")
-            response_data = response.json()

-            # Wrap the result MoneyRadar-style
-            search_results = {
-                "results": response_data,
-                "translated_query": search_query
-            }
-
-            # Log the response structure
             print(f"Response structure: {list(search_results.keys()) if isinstance(search_results, dict) else 'not a dictionary'}")

-            # Parse and format the search results (see MoneyRadar's format_results_from_raw)
             formatted_results = []
             formatted_results.append(f"Search query: {search_query}\n\n")

-            # Parse the result structure (identical to MoneyRadar)
-            if "results" in search_results and "results" in search_results["results"]:
-                if "results" in search_results["results"]["results"]:
-                    # Parse news results
-                    news_results = search_results["results"]["results"]["results"].get("news", [])
-                    if news_results:
-                        for result in news_results[:5]:  # show only the top 5
-                            title = result.get("title", "No title")
-                            snippet = result.get("snippet", "No content")
-                            url = result.get("url", result.get("link", "#"))
-                            source = result.get("source", result.get("channel", "Unknown"))
-                            time = result.get("time", result.get("date", "No time info"))
-
-                            formatted_results.append(
-                                f"Title: {title}\n"
-                                f"Source: {source}\n"
-                                f"Time: {time}\n"
-                                f"Content: {snippet}\n"
-                                f"Link: {url}\n\n"
-                            )
-
-                        print(f"Found {len(news_results)} search results")
-                        return "".join(formatted_results)

-            print("No search results, or the response format did not match")
-            print(f"Response structure: {search_results.keys() if hasattr(search_results, 'keys') else 'unclear structure'}")
-            return f"No results for query '{search_query}', or the API response format was unexpected."

         except Exception as e:
             error_msg = f"Error during search: {str(e)}"
             print(error_msg)
             import traceback
             print(traceback.format_exc())
-            return error_msg

     # Function that calls the DeepSeek API with streaming
     def query_deepseek_streaming(message, history, use_deep_research):
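For reference, the retry setup the removed branch used (a requests.Session with urllib3's Retry mounted through an HTTPAdapter) is a standard resilience pattern. A minimal standalone sketch, using the current urllib3.util.retry import path rather than the deprecated requests.packages alias; the helper name is illustrative, not part of the commit:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry  # modern path for requests.packages.urllib3.util.retry

def make_retrying_session(total=5, backoff_factor=1):
    """Build a session that retries transient HTTP failures with exponential backoff."""
    retries = Retry(
        total=total,
        backoff_factor=backoff_factor,
        status_forcelist=[429, 500, 502, 503, 504],
        allowed_methods=["POST"],  # POST is not retried by default; opt in explicitly
    )
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retries)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session

# Usage then mirrors the removed code:
# response = make_retrying_session().post(url, json=payload, headers=headers, timeout=(30, 30))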
 
             print(f"Original query: {query}")
             print(extraction_result)

+            # After a closer look at the docs, the basic GET method seems the better fit
             url = "https://api.serphouse.com/serp/live"

+            # Check whether the query is Korean
+            is_korean = any('\uAC00' <= c <= '\uD7A3' for c in search_query)

+            # Try simplified parameters
+            params = {
+                "q": search_query,
                 "domain": "google.com",
+                "serp_type": "web",  # switched to basic web search
                 "device": "desktop",
+                "lang": "ko" if is_korean else "en"
             }

             headers = {
+                "Authorization": f"Bearer {serphouse_api_key}"
             }

+            print(f"Calling SerpHouse API... trying the basic GET method")
+            print(f"Search query: {search_query}")
+            print(f"Request URL: {url} - parameters: {params}")

+            # Perform the GET request
+            response = requests.get(url, headers=headers, params=params)
             response.raise_for_status()

             print(f"SerpHouse API response status code: {response.status_code}")
+            search_results = response.json()

+            # Inspect the response structure
             print(f"Response structure: {list(search_results.keys()) if isinstance(search_results, dict) else 'not a dictionary'}")

+            # Parse and format the search results
             formatted_results = []
             formatted_results.append(f"Search query: {search_query}\n\n")

+            # Handle the various possible response structures
+            organic_results = None

+            # Possible response structure 1
+            if "results" in search_results and "organic" in search_results["results"]:
+                organic_results = search_results["results"]["organic"]

+            # Possible response structure 2
+            elif "organic" in search_results:
+                organic_results = search_results["organic"]
+
+            # Possible response structure 3 (nested results)
+            elif "results" in search_results and "results" in search_results["results"]:
+                if "organic" in search_results["results"]["results"]:
+                    organic_results = search_results["results"]["results"]["organic"]
+
+            # Process organic_results if present
+            if organic_results and len(organic_results) > 0:
+                # Print the structure of the first result
+                print(f"First organic result structure: {organic_results[0].keys() if len(organic_results) > 0 else 'empty'}")
+
+                for result in organic_results[:5]:  # show only the top 5 results
+                    title = result.get("title", "No title")
+                    snippet = result.get("snippet", "No content")
+                    link = result.get("link", "#")
+
+                    formatted_results.append(
+                        f"Title: {title}\n"
+                        f"Content: {snippet}\n"
+                        f"Link: {link}\n\n"
+                    )
+
+                print(f"Found {len(organic_results)} search results")
+                return "".join(formatted_results)
+
+            # No results, or an unexpected structure
+            print("No search results, or an unexpected response structure")
+            print(f"Response structure detail: {search_results.keys() if hasattr(search_results, 'keys') else 'unclear structure'}")
+
+            # Look for an error message in the response body
+            error_msg = "No search results, or the response format was unexpected"
+            if "error" in search_results:
+                error_msg = search_results["error"]
+            elif "message" in search_results:
+                error_msg = search_results["message"]
+
+            return f"Result for query '{search_query}': {error_msg}"
+
         except Exception as e:
             error_msg = f"Error during search: {str(e)}"
             print(error_msg)
             import traceback
             print(traceback.format_exc())
+
+            # Include API request details for debugging
+            return f"An error occurred during search: {str(e)}\n\n" + \
+                   f"API request details:\n" + \
+                   f"- URL: {url}\n" + \
+                   f"- Search query: {search_query}\n" + \
+                   f"- Parameters: {params}\n"

     # Function that calls the DeepSeek API with streaming
     def query_deepseek_streaming(message, history, use_deep_research):
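The added parsing probes three possible nesting depths for the organic list through an if/elif chain; the same probing can be expressed as a single walk over candidate key paths, which keeps the fallbacks in one place. A minimal sketch under the same assumptions about possible SerpHouse response shapes; find_organic and CANDIDATE_PATHS are illustrative names, not part of the commit:

from typing import Any, Optional

# Candidate key paths, mirroring response structures 1-3 in the diff above.
CANDIDATE_PATHS = [
    ("results", "organic"),
    ("organic",),
    ("results", "results", "organic"),
]

def find_organic(payload: dict) -> Optional[list]:
    """Return the first non-empty 'organic' list found along any candidate path."""
    for path in CANDIDATE_PATHS:
        node: Any = payload
        for key in path:
            if isinstance(node, dict) and key in node:
                node = node[key]
            else:
                break
        else:  # every key on the path resolved
            if isinstance(node, list) and node:
                return node
    return None

# Usage: organic_results = find_organic(search_results) replaces the if/elif chain.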