acecalisto3 committed on
Commit
26576f8
1 Parent(s): ec81efc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +44 -53
app.py CHANGED
@@ -18,24 +18,6 @@ from sqlalchemy.exc import SQLAlchemyError
18
  import validators
19
  import asyncio
20
 
21
- # Periodic update function
22
- async def periodic_update():
23
- while True:
24
- await asyncio.sleep(300) # Wait for 5 minutes
25
- await update_feed_content()
26
-
27
- async def update_feed_content():
28
- return generate_rss_feed()
29
-
30
- def start_periodic_task():
31
- loop = asyncio.get_event_loop()
32
- if loop.is_running():
33
- asyncio.create_task(periodic_update())
34
- else:
35
- loop.run_until_complete(periodic_update())
36
-
37
- # Start the periodic update task
38
- start_periodic_task()
39
  # Configure logging
40
  logging.basicConfig(level=logging.INFO,
41
  format='%(asctime)s - %(levelname)s - %(message)s')
@@ -57,6 +39,50 @@ engine = None # Initialize the database engine globally
57
  # Database setup
58
  Base = declarative_base()
59
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
60
 
61
  class Article(Base):
62
  __tablename__ = 'articles'
@@ -151,7 +177,6 @@ async def monitor_url(url: str, interval: int, storage_location: str,
151
 
152
  await asyncio.sleep(interval)
153
 
154
-
155
  async def start_monitoring(urls: List[str], storage_location: str,
156
  feed_rss: bool):
157
  for url in urls:
@@ -172,31 +197,6 @@ def stop_monitoring(url: str):
172
  monitoring_tasks[url].cancel()
173
  del monitoring_tasks[url]
174
 
175
-
176
- def generate_rss_feed():
177
- session = Session()
178
- try:
179
- articles = session.query(Article).order_by(
180
- Article.timestamp.desc()).limit(20).all()
181
- feed = feedparser.FeedParserDict()
182
- feed['title'] = 'Website Changes Feed'
183
- feed['link'] = 'http://yourwebsite.com/feed'
184
- feed['description'] = 'Feed of changes detected on monitored websites.'
185
- feed['entries'] = [{
186
- 'title': article.title,
187
- 'link': article.url,
188
- 'description': article.content,
189
- 'published': article.timestamp
190
- } for article in articles]
191
- return feedparser.FeedGenerator().feed_from_dictionary(
192
- feed).writeString('utf-8')
193
- except SQLAlchemyError as e:
194
- logger.error(f"Database error: {e}")
195
- return None
196
- finally:
197
- session.close()
198
-
199
-
200
  async def chatbot_response(message: str, history: List[Tuple[str, str]]):
201
  try:
202
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1",
@@ -297,15 +297,6 @@ with gr.Blocks() as demo:
297
  inputs=[message_input, chatbot_interface],
298
  outputs=[chatbot_interface, chatbot_interface])
299
 
300
- async def update_feed_content():
301
- return generate_rss_feed()
302
-
303
- # Periodic update loop
304
- async def periodic_update():
305
- while True:
306
- await asyncio.sleep(300) # Wait for 5 minutes
307
- await update_feed_content()
308
-
309
  # Start the periodic update task
310
  asyncio.create_task(periodic_update())
311
 
 
18
  import validators
19
  import asyncio
20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  # Configure logging
22
  logging.basicConfig(level=logging.INFO,
23
  format='%(asctime)s - %(levelname)s - %(message)s')
 
39
  # Database setup
40
  Base = declarative_base()
41
 
42
+ def generate_rss_feed():
43
+ session = Session()
44
+ try:
45
+ articles = session.query(Article).order_by(
46
+ Article.timestamp.desc()).limit(20).all()
47
+ feed = feedparser.FeedParserDict()
48
+ feed['title'] = 'Website Changes Feed'
49
+ feed['link'] = 'http://yourwebsite.com/feed' # Replace if needed
50
+ feed['description'] = 'Feed of changes detected on monitored websites.'
51
+ feed['entries'] = [{
52
+ 'title': article.title,
53
+ 'link': article.url,
54
+ 'description': article.content,
55
+ 'published': article.timestamp
56
+ } for article in articles]
57
+ return feedparser.FeedGenerator().feed_from_dictionary(
58
+ feed).writeString('utf-8')
59
+ except SQLAlchemyError as e:
60
+ logger.error(f"Database error: {e}")
61
+ return None
62
+ finally:
63
+ session.close()
64
+
65
+ async def update_feed_content():
66
+ return generate_rss_feed()
67
+
68
+ # Periodic update function
69
+ async def periodic_update():
70
+ while True:
71
+ await asyncio.sleep(300) # Wait for 5 minutes
72
+ await update_feed_content()
73
+
74
+ async def update_feed_content():
75
+ return generate_rss_feed()
76
+
77
+ def start_periodic_task():
78
+ loop = asyncio.get_event_loop()
79
+ if loop.is_running():
80
+ asyncio.create_task(periodic_update())
81
+ else:
82
+ loop.run_until_complete(periodic_update())
83
+
84
+ # Start the periodic update task
85
+ start_periodic_task()
86
 
87
  class Article(Base):
88
  __tablename__ = 'articles'
 
177
 
178
  await asyncio.sleep(interval)
179
 
 
180
  async def start_monitoring(urls: List[str], storage_location: str,
181
  feed_rss: bool):
182
  for url in urls:
 
197
  monitoring_tasks[url].cancel()
198
  del monitoring_tasks[url]
199
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
200
  async def chatbot_response(message: str, history: List[Tuple[str, str]]):
201
  try:
202
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1",
 
297
  inputs=[message_input, chatbot_interface],
298
  outputs=[chatbot_interface, chatbot_interface])
299
 
 
 
 
 
 
 
 
 
 
300
  # Start the periodic update task
301
  asyncio.create_task(periodic_update())
302