Update app.py
app.py CHANGED
@@ -233,19 +233,23 @@ def fetch_google_news_rss(query, num_results=10):
     }
     url = f"{base_url}?{urllib.parse.urlencode(params)}"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        feed = feedparser.parse(url)
+        articles = []
+
+        for entry in feed.entries[:num_results]:
+            article = {
+                "published_date": entry.get("published", "N/A"),
+                "title": entry.get("title", "N/A"),
+                "url": entry.get("link", "N/A"),
+                "content": entry.get("summary", "N/A")
+            }
+            articles.append(article)
+
+        return articles
+    except Exception as e:
+        print(f"Error fetching news: {str(e)}")
+        return []
 
 def summarize_news_content(content, model):
     prompt_template = """
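One caveat on the new error handling in this hunk: feedparser traps most network and parse failures internally instead of raising, so the except branch above will rarely fire. A minimal sketch of checking feedparser's bozo flag instead (parse_or_report is a hypothetical helper, not part of this commit):

import feedparser

def parse_or_report(url):
    # feedparser swallows most errors and sets the bozo flag instead of
    # raising, so an explicit check is needed to notice a bad feed.
    feed = feedparser.parse(url)
    if feed.bozo:
        print(f"Feed problem: {feed.bozo_exception}")
        return []
    return feed.entries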
@@ -264,18 +268,27 @@ def process_google_news_rss(query, temperature, top_p, repetition_penalty):
     embed = get_embeddings()
 
     articles = fetch_google_news_rss(query)
+    if not articles:
+        return "No news articles found for the given query."
+
     processed_articles = []
 
     for article in articles:
-
-
-
-
-
-
-
-
-
+        try:
+            summary = summarize_news_content(article["content"], model)
+            processed_article = {
+                "published_date": article["published_date"],
+                "title": article["title"],
+                "url": article["url"],
+                "content": article["content"],
+                "summary": summary
+            }
+            processed_articles.append(processed_article)
+        except Exception as e:
+            print(f"Error processing article: {str(e)}")
+
+    if not processed_articles:
+        return "Failed to process any news articles. Please try a different query or check the summarization process."
 
     # Add processed articles to the database
     docs = [Document(page_content=article["summary"], metadata={
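The per-article try/except in this hunk means one bad article no longer aborts the whole run. A self-contained sketch of the same isolation pattern with a stand-in summarizer (summarize_stub is hypothetical; the app calls summarize_news_content with its model):

def summarize_stub(text):
    # Stand-in for summarize_news_content(content, model); truncation only.
    return text[:80]

articles = [
    {"title": "A", "content": "Some article text"},
    {"title": "B", "content": None},  # this one will raise a TypeError
]

processed = []
for article in articles:
    try:
        processed.append({**article, "summary": summarize_stub(article["content"])})
    except Exception as e:
        # The failing article is logged and skipped; the loop continues.
        print(f"Error processing article: {str(e)}")

print(f"{len(processed)} of {len(articles)} articles processed")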
@@ -284,19 +297,22 @@ def process_google_news_rss(query, temperature, top_p, repetition_penalty):
         "published_date": article["published_date"]
     }) for article in processed_articles]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        if os.path.exists("faiss_database"):
+            database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
+            database.add_documents(docs)
+        else:
+            database = FAISS.from_documents(docs, embed)
+
+        database.save_local("faiss_database")
+
+        # Update news_database for excel export
+        global news_database
+        news_database.extend(processed_articles)
+
+        return f"Processed and added {len(processed_articles)} news articles to the database."
+    except Exception as e:
+        return f"Error adding articles to the database: {str(e)}"
 
 def export_news_to_excel():
     global news_database
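The last hunk wraps FAISS persistence in the usual load-or-create pattern from LangChain's FAISS wrapper. A standalone sketch under assumed imports (the app's get_embeddings() may wrap a different embedding class; HuggingFaceEmbeddings is a stand-in):

import os
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document

embed = HuggingFaceEmbeddings()  # assumed stand-in for get_embeddings()
docs = [Document(page_content="example summary",
                 metadata={"title": "Example", "url": "https://example.com"})]

if os.path.exists("faiss_database"):
    # Recent LangChain versions refuse to unpickle a saved index unless the
    # caller opts in with allow_dangerous_deserialization.
    database = FAISS.load_local("faiss_database", embed,
                                allow_dangerous_deserialization=True)
    database.add_documents(docs)
else:
    database = FAISS.from_documents(docs, embed)

database.save_local("faiss_database")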