acecalisto3 committed
Commit 8a00fb3 · verified · 1 Parent(s): 7fae359

Update app.py

Files changed (1)
  1. app.py +84 -7
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import asyncio
 import gradio as gr
 from sqlalchemy.exc import SQLAlchemyError
@@ -8,6 +7,8 @@ from sqlalchemy.orm import sessionmaker
 import logging
 import threading
 import time
+import requests
+from bs4 import BeautifulSoup
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(level=logging.INFO)
@@ -41,25 +42,78 @@ def update_db_status():
             return "Connected"
         else:
             return "Not connected"
-    except SQLAlchemyError:
+    except SQLAlchemyError as e:
+        logger.error(f"SQLAlchemyError: {e}")
+        return "Disconnected"
+    except Exception as e:
+        logger.error(f"Unexpected error: {e}")
         return "Disconnected"
 
 # Background task to update status
 def background_update(db_status_textbox):
     while True:
         status = update_db_status()
-        db_status_textbox.value = status
+        db_status_textbox.update(value=status)
         logger.info(f"Database status updated: {status}")
         time.sleep(60)
 
+# Function to scrape data from a URL
+def scrape_data(url):
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+        soup = BeautifulSoup(response.text, 'html.parser')
+        # Extract data as needed, e.g., headlines, articles, etc.
+        data = [element.text for element in soup.find_all('h1')]  # Example: extracting all h1 tags
+        return data
+    except requests.RequestException as e:
+        logger.error(f"Failed to scrape data from {url}: {e}")
+        return []
+
+# Function to store scraped data in the database
+async def store_data(data):
+    global db_session
+    try:
+        async with db_session() as session:
+            async with session.begin():
+                # Example: Assuming a table 'feeds' with a column 'content'
+                for item in data:
+                    await session.execute(
+                        "INSERT INTO feeds (content) VALUES (:content)",
+                        {"content": item}
+                    )
+            await session.commit()
+        logger.info("Data stored in the database.")
+    except SQLAlchemyError as e:
+        logger.error(f"Failed to store data in the database: {e}")
+
+# Function to serve data to a target URL
+def serve_data(target_url, data):
+    try:
+        response = requests.post(target_url, json={"data": data})
+        response.raise_for_status()
+        logger.info(f"Data served to {target_url} successfully.")
+        return "Data served successfully."
+    except requests.RequestException as e:
+        logger.error(f"Failed to serve data to {target_url}: {e}")
+        return f"Failed to serve data: {e}"
+
+# Function to monitor and autopost data
+def monitor_and_autopost(scrape_url, target_url):
+    while True:
+        data = scrape_data(scrape_url)
+        asyncio.run(store_data(data))
+        serve_data(target_url, data)
+        time.sleep(3600)  # Run every hour
+
 # Main application that runs Gradio UI and background tasks
 def main():
-    with gr.Blocks() as demo:
-        gr.Markdown("# Website Monitor and Chatbot")
+    with gr.Blocks(css=".gradio-container {background: linear-gradient(135deg, #6a0dad, #ffd700);}") as demo:
+        gr.Markdown("# Website Monitor and Chatbot", elem_id="title")
 
         with gr.Row():
             with gr.Column():
-                gr.Markdown("## Database Settings")
+                gr.Markdown("## Database Settings", elem_id="subtitle")
                 db_host = gr.Textbox(label="Database Host", placeholder="localhost", value="localhost")
                 db_port = gr.Textbox(label="Database Port", placeholder="3306", value="3306")
                 db_user = gr.Textbox(label="Database User", placeholder="username", value="")
@@ -71,13 +125,36 @@ def main():
 
                 connect_button = gr.Button("Connect to Database")
 
+                gr.Markdown("## Scraping and Serving Settings", elem_id="subtitle")
+                scrape_url = gr.Textbox(label="Scrape URL", placeholder="https://example.com")
+                target_url = gr.Textbox(label="Target URL", placeholder="https://target.com/api")
+
+                start_monitoring_button = gr.Button("Start Monitoring and Autoposting")
+
                 # Connect button click event
+                def on_connect_click(host, port, user, password, db_name):
+                    loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(loop)
+                    result = loop.run_until_complete(set_db_connection(host, port, user, password, db_name))
+                    return result
+
                 connect_button.click(
-                    set_db_connection,
+                    on_connect_click,
                     inputs=[db_host, db_port, db_user, db_pass, db_name],
                     outputs=[status_text]
                 )
 
+                # Start monitoring and autoposting button click event
+                def on_start_monitoring_click(scrape_url, target_url):
+                    threading.Thread(target=monitor_and_autopost, args=(scrape_url, target_url), daemon=True).start()
+                    return "Monitoring and autoposting started."
+
+                start_monitoring_button.click(
+                    on_start_monitoring_click,
+                    inputs=[scrape_url, target_url],
+                    outputs=[]
+                )
+
                 # Start background task to update status
                 threading.Thread(target=background_update, args=(db_status_textbox,), daemon=True).start()
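
The new on_connect_click handler runs set_db_connection(host, port, user, password, db_name) on a fresh event loop, and store_data reads a module-level db_session factory, but neither definition appears in these hunks. Below is a minimal sketch of how such a helper could be wired, assuming SQLAlchemy's asyncio extension with an aiomysql connection URL; apart from the names set_db_connection and db_session, nothing here is taken from app.py.

import logging
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker

logger = logging.getLogger(__name__)
db_session = None  # async session factory, later read by store_data() via `global db_session`

async def set_db_connection(host, port, user, password, db_name):
    """Build an async engine and session factory, returning a status string for the UI."""
    global db_session
    try:
        engine = create_async_engine(
            f"mysql+aiomysql://{user}:{password}@{host}:{port}/{db_name}",
            echo=False,
        )
        # Open one connection now so bad credentials fail here, not later in store_data().
        async with engine.connect():
            pass
        db_session = async_sessionmaker(engine, expire_on_commit=False)
        return "Connection established."
    except SQLAlchemyError as e:
        logger.error(f"Failed to connect to the database: {e}")
        return f"Connection failed: {e}"

Returning a plain string keeps the sketch compatible with the click handler in the diff, which passes the return value of on_connect_click straight to the status_text textbox.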
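
If app.py runs on SQLAlchemy 2.x, the bare SQL string passed to session.execute() in the new store_data would be rejected, since 2.x requires textual SQL to be wrapped in text(). A variant of the function under that assumption, keeping the same feeds(content) table and db_session factory the diff assumes:

import logging
from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError

logger = logging.getLogger(__name__)
db_session = None  # async session factory, e.g. produced by set_db_connection()

async def store_data(data):
    """Insert scraped items into feeds(content), wrapping the raw SQL in text()."""
    global db_session
    try:
        async with db_session() as session:
            async with session.begin():  # commits automatically on clean exit
                for item in data:
                    await session.execute(
                        text("INSERT INTO feeds (content) VALUES (:content)"),
                        {"content": item},
                    )
        logger.info("Data stored in the database.")
    except SQLAlchemyError as e:
        logger.error(f"Failed to store data in the database: {e}")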