mgokg committed on
Commit 0c010c4 · verified · 1 Parent(s): 1d0db3a

Update app.py

Files changed (1):
  1. app.py +6 -5
app.py CHANGED
@@ -13,7 +13,7 @@ import gradio as gr
 
 # -----------------------------------------------------------------------------
 # Default configuration and Prompts
-NUM_SEARCH = 8 # Number of links to parse from Google
+NUM_SEARCH = 8 # Number of links to parse from Google
 SEARCH_TIME_LIMIT = 7 # Max seconds to request website sources before skipping to the next URL
 TOTAL_TIMEOUT = 25 # Overall timeout for all operations
 MAX_CONTENT = 500 # Number of words to add to LLM context for each search result
@@ -90,13 +90,14 @@ def fetch_webpage(url, timeout):
 
 def parse_google_results(query, num_search=NUM_SEARCH, search_time_limit=SEARCH_TIME_LIMIT):
     """Perform a Google search and parse the content of the top results."""
-    urls = search(query, num_results=num_search)
+    urls = list(search(query, num_results=num_search))
     max_workers = os.cpu_count() or 1 # Fallback to 1 if os.cpu_count() returns None
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
         future_to_url = {executor.submit(fetch_webpage, url, search_time_limit): url for url in urls}
         return {url: page_text for future in as_completed(future_to_url) if (url := future.result()[0]) and (page_text := future.result()[1])}
 
 def save_markdown(content, file_path):
+    """Save content to a Markdown file."""
     with open(file_path, 'a') as file:
         file.write(content)
 
@@ -112,7 +113,7 @@ def llm_check_search(query, file_path, msg_history=None, llm_model=LLM_MODEL):
         max_tokens=30
     ).choices[0].message.content
 
-    # check if the response contains "ns"
+    # Check if the response contains "ns"
     cleaned_response = response.lower().strip()
     if re.fullmatch(r"\bns\b", cleaned_response):
         print("No Google search required.")
@@ -157,7 +158,7 @@ def llm_answer(query, file_path, msg_history=None, search_dic=None, llm_model=LLM_MODEL):
         save_markdown(chunk_content, file_path)
 
     print("\n" + "*" * 21 + " LLM END " + "*" * 21 + "\n")
-    # change the line for the next question
+    # Change the line for the next question
     save_markdown("\n\n", file_path)
     new_msg_history = new_msg_history + [{"role": "assistant", "content": ''.join(content)}]
 
@@ -167,7 +168,7 @@ def main_interface(query, file_path="playground.md"):
     """Main function to execute the search, generate response, and save to markdown."""
    msg_history = None
    save_path = None
-    # start with an empty file
+    # Start with an empty file
    with open(file_path, 'w') as file:
        pass
 
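The main functional change is wrapping the `search()` generator in `list()` before the URLs are handed to the thread pool, so the Google query is fully consumed up front and the resulting list can be iterated safely. A minimal sketch of that pattern, assuming the `googlesearch-python` package and using a simplified `requests`-based stand-in for the app's own `fetch_webpage` helper:

```python
# Minimal sketch of the concurrent-fetch pattern in parse_google_results.
# Assumptions: the `googlesearch` (googlesearch-python) and `requests` packages;
# fetch_webpage below is a simplified stand-in for the app's own helper.
import os
from concurrent.futures import ThreadPoolExecutor, as_completed

import requests
from googlesearch import search


def fetch_webpage(url, timeout):
    """Return (url, text) on success, (url, None) on any failure."""
    try:
        resp = requests.get(url, timeout=timeout)
        resp.raise_for_status()
        return url, resp.text
    except requests.RequestException:
        return url, None


def parse_google_results(query, num_search=8, search_time_limit=7):
    # Materialize the search generator so the URL list is fixed before fetching.
    urls = list(search(query, num_results=num_search))
    max_workers = os.cpu_count() or 1  # Fallback to 1 if os.cpu_count() returns None
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(fetch_webpage, url, search_time_limit) for url in urls]
        # Keep only the pages that actually returned text.
        return {url: text for future in as_completed(futures)
                if (url := future.result()[0]) and (text := future.result()[1])}
```

Materializing the generator means the search itself completes (or fails) before any worker threads start, rather than interleaving the Google requests with the page fetches.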
 
 
 
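The remaining comment tweaks sit around the Markdown bookkeeping: `main_interface` starts by truncating the output file (open with `'w'`, do nothing), `save_markdown` appends each streamed chunk with mode `'a'`, and `llm_answer` writes `"\n\n"` so the next answer starts on a new paragraph. A quick sketch of that file lifecycle, reusing the `playground.md` default from `main_interface` (the sample answer strings are placeholders):

```python
def save_markdown(content, file_path):
    """Append content to a Markdown file."""
    with open(file_path, 'a') as file:
        file.write(content)


file_path = "playground.md"             # default used by main_interface
with open(file_path, 'w') as file:      # start with an empty file (truncate or create)
    pass

save_markdown("First answer...", file_path)
save_markdown("\n\n", file_path)        # blank line before the next question's answer
save_markdown("Second answer...", file_path)
```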