acecalisto3 committed on
Commit 827e96b · verified · 1 Parent(s): 063e8eb

Update app.py

Files changed (1): app.py +49 -70
app.py CHANGED
@@ -1,74 +1,53 @@
-import gradio as gr
 from agent import start_scraping, display_csv, generate_rss_feed, chat_interface
 
-def validate_input(values):
-    """Validate user input"""
-    if not values["storage_location"]:
-        raise ValueError("Storage location cannot be empty")
-    if not values["urls"]:
-        raise ValueError("URLs cannot be empty")
-    return values
-
-def create_interface():
-    """Create Gradio interface"""
-    with gr.Blocks() as demo:
-        with gr.Row():
-            with gr.Column():
-                message = gr.Textbox(label="Message")
-                system_message = gr.Textbox(value="You are a helpful assistant.", label="System message")
-                max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
-                temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
-                top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
-                storage_location = gr.Textbox(value="scraped_data", label="Storage Location")
-                urls = gr.Textbox(label="URLs (comma separated)")
-                scrape_interval = gr.Slider(minimum=1, maximum=60, value=5, step=1, label="Scrape Interval (minutes)")
-                content_type = gr.Radio(choices=["text", "media", "both"], value="text", label="Content Type")
-                selector = gr.Textbox(label="CSS Selector (for media, optional)")
-                start_button = gr.Button("Start Scraping")
-                csv_output = gr.Textbox(label="CSV Output", interactive=False)
-                status_output = gr.Textbox(label="Status", interactive=False)
-
-            with gr.Column():
-                chat_history = gr.Chatbot(label="Chat History")
-                response_box = gr.Textbox(label="Response")
-
-        def start_scraping_callback(values):
-            """Start scraping callback"""
-            try:
-                validated_values = validate_input(values)
-                status_output.value = start_scraping(validated_values["storage_location"], validated_values["urls"].split(","), validated_values["scrape_interval"], validated_values["content_type"], validated_values["selector"])
-                csv_output.value = display_csv(validated_values["storage_location"], validated_values["urls"].split(",")[0])  # Display CSV for the first URL
-            except Exception as e:
-                print(f"Error: {str(e)}")
-
-        start_button.click(start_scraping_callback, inputs=[storage_location, urls, scrape_interval, content_type, selector], outputs=[csv_output, status_output])
-
-        def chat_interface_callback(values):
-            """Chat interface callback"""
-            try:
-                chat_interface(values["message"], chat_history, system_message, max_tokens, temperature, top_p, storage_location, urls.split(","), scrape_interval, content_type, selector)
-            except Exception as e:
-                print(f"Error: {str(e)}")
-
-        message.submit(chat_interface_callback, inputs=[message, chat_history, system_message, max_tokens, temperature, top_p, storage_location, urls, scrape_interval, content_type, selector], outputs=[chat_history, response_box])
-
-        # Add a button to display the RSS feed for a selected URL
-        with gr.Row():
-            selected_url = gr.Textbox(label="Select URL for RSS Feed")
-            rss_button = gr.Button("Generate RSS Feed")
-            rss_output = gr.Textbox(label="RSS Feed Output", interactive=False)
-
-        def generate_rss_feed_callback(values):
-            """Generate RSS feed callback"""
-            try:
-                rss_output.value = generate_rss_feed(storage_location, values["selected_url"])
-            except Exception as e:
-                print(f"Error: {str(e)}")
-
-        rss_button.click(generate_rss_feed_callback, inputs=[selected_url], outputs=rss_output)
-
-    return demo
 
 if __name__ == "__main__":
-    demo = create_interface()
-    demo.launch()
 
+import os
 from agent import start_scraping, display_csv, generate_rss_feed, chat_interface
+import gradio as gr
 
+def chatbot_interface(message, history, system_message, max_tokens, temperature, top_p, storage_location, urls, scrape_interval, content_type, selector):
+    history, response = chat_interface(message, history, system_message, max_tokens, temperature, top_p, storage_location, urls, scrape_interval, content_type, selector)
+    return history, response
+
+def generate_rss(storage_location, url):
+    feed_entries = generate_rss_feed(storage_location, url)
+    return feedparser.FeedParserDict(feed_entries)
+
+def main():
+    storage_location = "scraped_data"
+    urls = ["https://www.culvers.com/"]
+    scrape_interval = 5
+    content_type = "text"
+    selector = ""
+
+    chatbot_input = gr.inputs.Textbox(lines=5, label="Chatbot Input")
+    history_output = gr.outputs.Textbox(label="History")
+    response_output = gr.outputs.Textbox(label="Response")
+
+    csv_input = gr.inputs.Textbox(lines=1, label="CSV Input URL")
+    csv_output = gr.outputs.Textbox(label="CSV Output")
+
+    rss_input = gr.inputs.Textbox(lines=1, label="RSS Input URL")
+    rss_output = gr.outputs.HTML(label="RSS Output")
+
+    chatbot_interface = gr.Interface(
+        chatbot_interface,
+        [chatbot_input, history_output, system_message, max_tokens, temperature, top_p, storage_location, urls, scrape_interval, content_type, selector],
+        [history_output, response_output]
+    )
+
+    csv_interface = gr.Interface(
+        lambda url: display_csv(storage_location, url),
+        [csv_input],
+        [csv_output]
+    )
+
+    rss_interface = gr.Interface(
+        generate_rss,
+        [rss_input, storage_location],
+        [rss_output]
+    )
+
+    chatbot_interface.launch()
+    csv_interface.launch()
+    rss_interface.launch()
 
 if __name__ == "__main__":
+    main()
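
Note on the added code: as committed it will not run unmodified. generate_rss references feedparser without importing it, gr.inputs / gr.outputs no longer exist in current Gradio releases, main() passes plain Python values and undefined names (system_message, max_tokens, temperature, top_p, storage_location, urls, ...) where gr.Interface expects components, the local name chatbot_interface shadows the function it wraps, and the three consecutive launch() calls block on the first one. The following is a minimal sketch of how the same three tools could be wired with the current Gradio API. It assumes the agent functions keep the signatures shown in this diff (chat_interface returning a (history, response) pair, display_csv(storage_location, url), generate_rss_feed(storage_location, url) returning text or markup); the helper names chatbot_fn / csv_fn / rss_fn, the gr.State-based history handling, and the tabbed layout are editorial choices, not part of the commit.

import gradio as gr

from agent import chat_interface, display_csv, generate_rss_feed

# Fixed scraping settings, taken from main() in the committed version.
STORAGE_LOCATION = "scraped_data"
URLS = ["https://www.culvers.com/"]
SCRAPE_INTERVAL = 5  # minutes
CONTENT_TYPE = "text"
SELECTOR = ""


def chatbot_fn(message, history, system_message, max_tokens, temperature, top_p):
    # Forward the chat turn to agent.chat_interface with the fixed settings above.
    history, response = chat_interface(
        message, history, system_message, max_tokens, temperature, top_p,
        STORAGE_LOCATION, URLS, SCRAPE_INTERVAL, CONTENT_TYPE, SELECTOR,
    )
    return history, response


def csv_fn(url):
    # Show the CSV collected for a single URL.
    return display_csv(STORAGE_LOCATION, url)


def rss_fn(url):
    # Assumes generate_rss_feed returns text/markup that gr.HTML can render.
    return generate_rss_feed(STORAGE_LOCATION, url)


def main():
    chatbot_ui = gr.Interface(
        fn=chatbot_fn,
        inputs=[
            gr.Textbox(lines=5, label="Message"),
            gr.State([]),  # chat history carried across calls
            gr.Textbox(value="You are a helpful assistant.", label="System message"),
            gr.Slider(1, 2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(0.1, 4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
        ],
        outputs=[gr.State(), gr.Textbox(label="Response")],
    )
    csv_ui = gr.Interface(
        fn=csv_fn,
        inputs=gr.Textbox(label="CSV Input URL"),
        outputs=gr.Textbox(label="CSV Output"),
    )
    rss_ui = gr.Interface(
        fn=rss_fn,
        inputs=gr.Textbox(label="RSS Input URL"),
        outputs=gr.HTML(label="RSS Output"),
    )

    # One server for all three tools instead of three blocking launch() calls.
    gr.TabbedInterface(
        [chatbot_ui, csv_ui, rss_ui],
        ["Chatbot", "CSV", "RSS"],
    ).launch()


if __name__ == "__main__":
    main()

A single gr.TabbedInterface(...).launch() serves the chatbot, CSV viewer, and RSS generator from one app, which avoids the blocking behaviour of launching three separate interfaces in sequence.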