openfree committed
Commit 65f1795 · verified · 1 Parent(s): 3bf9017

Update app.py

Files changed (1)
  1. app.py +49 -26
app.py CHANGED
@@ -80,6 +80,12 @@ def create_deepseek_interface():
         lang = "ko" if is_korean else "en"
         loc = "kr" if is_korean else "us"

+        # Follow the MoneyRadar approach exactly - switch serp_type to news and add a date range
+        from datetime import datetime, timedelta
+        now = datetime.utcnow()
+        yesterday = now - timedelta(days=1)
+        date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
+
         # Execute the SerpHouse API call - use the POST method (refer to the MoneyRadar code)
         url = "https://api.serphouse.com/serp/live"

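For reference, the date window added above always covers the previous UTC day through today. A minimal sketch with a fixed timestamp (so the output is deterministic) shows the resulting string; how SerpHouse interprets the "from,to" format is not confirmed by this commit.

    from datetime import datetime, timedelta

    # Same construction as in the diff, but with a fixed "now" so the result is reproducible.
    now = datetime(2025, 3, 1, 7, 30)  # stand-in for datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
    print(date_range)  # 2025-02-28,2025-03-01 (the window crosses the month boundary correctly)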
@@ -91,9 +97,11 @@ def create_deepseek_interface():
                 "loc": loc,
                 "lang": lang,
                 "device": "desktop",
-                "serp_type": "web",
+                "serp_type": "news",  # changed from web to news
                 "page": "1",
-                "num": "5"
+                "num": "5",
+                "date_range": date_range,  # added: date range for the search
+                "sort_by": "date"  # added: sort results by date
             }
         }

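The hunk above shows only the tail of the request body. For context, here is a sketch of what the full payload might look like after this change; the wrapper keys ("data", "q", "domain") are assumptions based on typical SerpHouse usage and are not visible in this diff, while the keys from "loc" onward are exactly what the commit sets.

    # Hypothetical full payload; only the keys from "loc" onward are confirmed by this commit.
    search_query = "example query"      # placeholder values for illustration
    loc, lang = "us", "en"
    date_range = "2025-02-28,2025-03-01"

    payload = {
        "data": {                        # assumed wrapper key
            "q": search_query,           # assumed query key
            "domain": "google.com",      # assumed default search domain
            "loc": loc,
            "lang": lang,
            "device": "desktop",
            "serp_type": "news",         # switched from "web" in this commit
            "page": "1",
            "num": "5",
            "date_range": date_range,    # added: "YYYY-MM-DD,YYYY-MM-DD"
            "sort_by": "date"            # added in this commit
        }
    }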
@@ -106,45 +114,60 @@ def create_deepseek_interface():
         print(f"Calling SerpHouse API... query: {search_query}")
         print(f"Request method: POST, payload: {payload}")

-        # Perform the POST request
-        response = requests.post(url, json=payload, headers=headers, timeout=(30, 30))
+        # Perform the POST request (session and retry logic added)
+        session = requests.Session()
+        retries = requests.packages.urllib3.util.retry.Retry(
+            total=5,
+            backoff_factor=1,
+            status_forcelist=[500, 502, 503, 504, 429],
+            allowed_methods=["POST"]
+        )
+        adapter = requests.adapters.HTTPAdapter(max_retries=retries)
+        session.mount('http://', adapter)
+        session.mount('https://', adapter)
+
+        response = session.post(
+            url,
+            json=payload,
+            headers=headers,
+            timeout=(30, 30)
+        )
+
         response.raise_for_status()

         print(f"SerpHouse API response status code: {response.status_code}")
         search_results = response.json()

-        # Log the response structure
+        # Analyze and apply MoneyRadar's way of handling results
         print(f"Response structure: {list(search_results.keys()) if isinstance(search_results, dict) else 'not a dictionary'}")

-        # Parse and format the search results
+        # Parse and format the search results
         formatted_results = []
         formatted_results.append(f"Search query: {search_query}\n\n")

-        if "results" in search_results:
-            results = search_results["results"]
-            if "organic" in results:
-                organic_results = results["organic"]
-                for result in organic_results[:5]:  # use only the top 5 results
+        # Parse the result structure (same as MoneyRadar)
+        if "results" in search_results and "results" in search_results["results"]:
+            # Parse the news results
+            news_results = search_results["results"]["results"].get("news", [])
+            if news_results:
+                for result in news_results[:5]:
                     title = result.get("title", "No title")
                     snippet = result.get("snippet", "No content")
-                    link = result.get("link", "#")
-                    formatted_results.append(f"Title: {title}\nContent: {snippet}\nLink: {link}\n\n")
+                    url = result.get("url", result.get("link", "#"))
+                    source = result.get("source", result.get("channel", "Unknown"))
+                    time = result.get("time", result.get("date", "No time information"))
+
+                    formatted_results.append(
+                        f"Title: {title}\n"
+                        f"Source: {source}\n"
+                        f"Time: {time}\n"
+                        f"Content: {snippet}\n"
+                        f"Link: {url}\n\n"
+                    )

-                print(f"Found {len(organic_results)} search results")
+                print(f"Found {len(news_results)} search results")
                 return "".join(formatted_results)

-        # Check another possible result structure
-        if isinstance(search_results, dict) and "organic" in search_results:
-            organic_results = search_results["organic"]
-            for result in organic_results[:5]:
-                title = result.get("title", "No title")
-                snippet = result.get("snippet", "No content")
-                link = result.get("link", "#")
-                formatted_results.append(f"Title: {title}\nContent: {snippet}\nLink: {link}\n\n")
-
-            print(f"Found {len(organic_results)} search results")
-            return "".join(formatted_results)
-
         print("No search results, or the response format did not match")
         print(f"Response preview: {str(search_results)[:300]}...")
         return f"No search results for '{search_query}', or the API response format differs from what was expected."
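As a side note on the retry wiring added in the last hunk: it reaches through requests.packages to get urllib3's Retry class. A minimal standalone sketch of the same pattern with the conventional imports is below (allowed_methods requires urllib3 >= 1.26; older versions call it method_whitelist). The endpoint, payload, and timeouts are the ones from the diff; nothing else about app.py is assumed.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    def post_with_retries(url, payload, headers, total=5, backoff_factor=1):
        """POST `payload` as JSON, retrying transient failures (429/5xx) with backoff."""
        retry = Retry(
            total=total,
            backoff_factor=backoff_factor,
            status_forcelist=[429, 500, 502, 503, 504],
            allowed_methods=["POST"],  # POST is not retried by default
        )
        session = requests.Session()
        adapter = HTTPAdapter(max_retries=retry)
        session.mount("http://", adapter)
        session.mount("https://", adapter)
        return session.post(url, json=payload, headers=headers, timeout=(30, 30))

With a helper like this, the call site in the hunk would reduce to something like response = post_with_retries(url, payload, headers) followed by response.raise_for_status().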