acecalisto3 commited on
Commit
82cd09a
·
verified ·
1 Parent(s): 380086b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -1
app.py CHANGED
@@ -85,10 +85,17 @@ client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
85
  def respond(message, history, system_message, max_tokens, temperature, top_p):
86
  return generate(message, history, system_message, max_tokens, temperature, top_p)
87
 
88
- # Function to start scraping
89
  def start_scraping(storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type):
90
  urls = [url for url in [url1, url2, url3, url4, url5, url6, url7, url8, url9, url10] if url]
91
  handle_input(storage_location, urls, scrape_interval, content_type)
 
 
 
 
 
 
 
 
92
  return f"Started scraping {', '.join(urls)} every {scrape_interval} minutes."
93
 
94
  # Function to display CSV content
 
85
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Chat callback: forward every argument unchanged to generate().

    Thin adapter kept so the UI wiring has a stable entry point; all of the
    actual inference logic lives in generate().
    """
    reply = generate(message, history, system_message, max_tokens, temperature, top_p)
    return reply
87
 
 
88
def start_scraping(storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type):
    """Kick off scraping of up to ten URLs and return a status message.

    Collects the non-empty URLs, hands them to handle_input() (which is
    expected to own the periodic scraping itself — TODO confirm against its
    definition), and returns a human-readable confirmation string.

    Bug fix: the previous version entered ``while True: time.sleep(...)``
    with no break before closing the transaction, so
    ``inspector.end_transaction()`` and the ``return`` were unreachable and
    the UI handler blocked forever. The blocking loop is removed; the
    sleep/interval logic belongs in the background scraper, not in the
    request handler.

    Args:
        storage_location: where scraped data is stored (passed through).
        url1..url10: candidate URLs; falsy entries are skipped.
        scrape_interval: minutes between scrapes (passed through and echoed
            in the status message).
        content_type: kind of content to scrape (passed through).

    Returns:
        A status string naming the URLs and the interval.
    """
    urls = [url for url in [url1, url2, url3, url4, url5, url6, url7, url8, url9, url10] if url]
    # Instrument the request; guarantee the transaction is closed even if
    # handle_input raises, instead of leaving it dangling.
    inspector.start_transaction('start_scraping')
    try:
        handle_input(storage_location, urls, scrape_interval, content_type)
    finally:
        inspector.end_transaction()
    return f"Started scraping {', '.join(urls)} every {scrape_interval} minutes."
100
 
101
  # Function to display CSV content