acecalisto3 committed
Commit 18a8600 · verified · 1 Parent(s): 167e3cf

Update app2.py

Files changed (1):
  1. app2.py +29 -15

app2.py CHANGED
@@ -7,7 +7,6 @@ from typing import List, Tuple
 import aiohttp
 import datetime
 import hashlib
-from pathlib import Path
 
 import feedparser
 import gradio as gr
@@ -19,8 +18,7 @@ import validators
 from bs4 import BeautifulSoup
 
 # Configure logging
-logging.basicConfig(level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
 # Configuration
@@ -87,7 +85,8 @@ async def save_to_csv(storage_location: str, url: str, title: str,
                        content: str, timestamp: datetime.datetime):
     try:
         os.makedirs(os.path.dirname(storage_location), exist_ok=True)
-        with open(storage_location, "a", newline='', encoding="utf-8") as csvfile:
+        with open(storage_location, "a", newline='',
+                  encoding="utf-8") as csvfile:
             csv_writer = csv.writer(csvfile)
             csv_writer.writerow([
                 timestamp.strftime("%Y-%m-%d %H:%M:%S"), url, title, content
@@ -220,17 +219,18 @@ async def update_db_status(db_status):
     while True:
         try:
             await db_session.execute("SELECT 1")
-            await db_status.update(value="Connected")
+            db_status = "Connected"
         except SQLAlchemyError:
-            await db_status.update(value="Disconnected")
+            db_status = "Disconnected"
         await asyncio.sleep(60)  # Check every minute
+    return db_status
 
 
 async def update_feed_content(db_session):
     try:
         articles = await db_session.query(Article).order_by(
             Article.timestamp.desc()).limit(20).all()
-        feed {
+        feed = {
             'title': 'Website Changes Feed',
             'link': 'http://yourwebsite.com/feed',
             'description': 'Feed of changes detected on monitored websites.',
@@ -305,20 +305,34 @@ async def main():
                                    placeholder="Type your message here...")
         send_button = gr.Button("Send")
 
-        start_button.click(
-            start_monitoring,
-            inputs=[target_urls, storage_location, feed_rss_checkbox],
-            outputs=status_text)
+        async def on_start_click(target_urls_str: str, storage_loc: str,
+                                 feed_enabled: bool):
+            urls = [url.strip() for url in target_urls_str.split(",")]
+            await start_monitoring(urls,
+                                   storage_loc if storage_loc else None,
+                                   feed_enabled)
+            return "Monitoring started for valid URLs."
 
-        stop_button.click(lambda url: stop_monitoring(url),
-                          inputs=target_urls,
-                          outputs=status_text)
+        async def on_stop_click():
+            for url in list(monitoring_tasks.keys()):
+                stop_monitoring(url)
+            return "Monitoring stopped for all URLs."
 
+        start_button.click(
+            on_start_click,
+            inputs=[target_urls, storage_location, feed_rss_checkbox],
+            outputs=[status_text])
+        stop_button.click(on_stop_click, outputs=[status_text])
         send_button.click(
            chatbot_response,
            inputs=[message_input, chatbot_interface],
            outputs=[chatbot_interface, message_input])
 
+        feed_updater = gr.Timer(every=300)
+        feed_updater.tick(fn=update_feed_content,
+                          inputs=[db_session],
+                          outputs=feed_content)
+
         asyncio.create_task(periodic_update_with_error_handling(db_session))
         asyncio.create_task(update_db_status(db_status))
 
@@ -331,4 +345,4 @@ async def main():
 
 
 if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(main())
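
The wiring pattern this commit adopts (async click handlers that return a status string, plus a timer-driven periodic refresh) can be exercised in isolation. The sketch below keeps the diff's handler and component names, but start_monitoring, stop_monitoring, and monitoring_tasks are stubs standing in for app2.py's real implementations; it also assumes a recent Gradio release where gr.Timer takes the interval in seconds as its first positional argument.

```python
import asyncio
import datetime

import gradio as gr

# Stub registry standing in for app2.py's real monitoring_tasks mapping.
monitoring_tasks: dict[str, asyncio.Task] = {}  # url -> background task


async def start_monitoring(urls, storage_loc, feed_enabled):
    # Stub: register a no-op task per URL so stop_monitoring has
    # something to cancel. The real version scrapes and diffs pages.
    for url in urls:
        monitoring_tasks[url] = asyncio.create_task(asyncio.sleep(3600))


def stop_monitoring(url):
    # Cancel and forget the background task for one URL.
    task = monitoring_tasks.pop(url, None)
    if task:
        task.cancel()


async def on_start_click(target_urls_str: str, storage_loc: str,
                         feed_enabled: bool):
    # Split the comma-separated textbox value and start monitoring.
    urls = [u.strip() for u in target_urls_str.split(",") if u.strip()]
    await start_monitoring(urls, storage_loc or None, feed_enabled)
    return f"Monitoring started for {len(urls)} URL(s)."


async def on_stop_click():
    # Stop every active monitor; copy the keys since we mutate the dict.
    for url in list(monitoring_tasks):
        stop_monitoring(url)
    return "Monitoring stopped for all URLs."


def refresh_feed():
    # Stub for update_feed_content: just prove the timer fires.
    return f"Feed refreshed at {datetime.datetime.now():%H:%M:%S}"


with gr.Blocks() as demo:
    target_urls = gr.Textbox(label="Target URLs (comma-separated)")
    storage_location = gr.Textbox(label="Storage location")
    feed_rss_checkbox = gr.Checkbox(label="Enable RSS feed")
    status_text = gr.Textbox(label="Status")
    feed_content = gr.Textbox(label="Feed")
    start_button = gr.Button("Start")
    stop_button = gr.Button("Stop")

    start_button.click(on_start_click,
                       inputs=[target_urls, storage_location,
                               feed_rss_checkbox],
                       outputs=[status_text])
    stop_button.click(on_stop_click, outputs=[status_text])

    # Fire every 300 seconds, mirroring the diff's feed_updater wiring.
    feed_updater = gr.Timer(300)
    feed_updater.tick(fn=refresh_feed, outputs=feed_content)

demo.launch()
```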