Shreyas094 committed (verified)
Commit 618cc2a · Parent(s): a5d1e67

Update app.py

Files changed (1): app.py (+22 −7)
app.py CHANGED
@@ -79,12 +79,15 @@ def update_vectors(files, parser):
     return f"Vector store updated successfully. Processed {total_chunks} chunks from {len(files)} files using {parser}."
 
 def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, temperature=0.2):
+    print(f"Starting generate_chunked_response with {num_calls} calls")  # Debug log
     client = InferenceClient(model, token=huggingface_token)
     full_responses = []
     messages = [{"role": "user", "content": prompt}]
 
-    for _ in range(num_calls):
-        if stop_clicked:  # Check if stop was clicked
+    for i in range(num_calls):
+        print(f"Starting API call {i+1}")  # Debug log
+        if stop_clicked:
+            print("Stop clicked, breaking loop")  # Debug log
             break
         try:
             response = ""
@@ -94,23 +97,25 @@ def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, tempe
             temperature=temperature,
             stream=True,
         ):
-            if stop_clicked:  # Check if stop was clicked
+            if stop_clicked:
+                print("Stop clicked during streaming, breaking")  # Debug log
                 break
             if message.choices and message.choices[0].delta and message.choices[0].delta.content:
                 chunk = message.choices[0].delta.content
                 response += chunk
+        print(f"API call {i+1} response: {response[:100]}...")  # Debug log
         full_responses.append(response)
     except Exception as e:
         print(f"Error in generating response: {str(e)}")
 
-    # Combine all responses into a single string
     combined_response = " ".join(full_responses)
+    print(f"Combined response: {combined_response[:100]}...")  # Debug log
 
-    # Clean the combined response
     clean_response = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', combined_response, flags=re.DOTALL)
     clean_response = clean_response.replace("Using the following context:", "").strip()
     clean_response = clean_response.replace("Using the following context from the PDF documents:", "").strip()
 
+    print(f"Final clean response: {clean_response[:100]}...")  # Debug log
     return clean_response
 
 def duckduckgo_search(query):
@@ -246,29 +251,39 @@ with gr.Blocks() as demo:
         clear_btn = gr.Button("Clear")
 
     def protected_generate_response(message, history, use_web_search, model, temperature, num_calls, is_generating, stop_clicked):
+        print("Starting protected_generate_response")  # Debug log
         if is_generating:
+            print("Already generating, returning")  # Debug log
             return message, history, is_generating, stop_clicked
         is_generating = True
         stop_clicked = False
 
         try:
+            print(f"Generating response for: {message}")  # Debug log
             if use_web_search:
+                print("Using web search")  # Debug log
                 main_content, sources = get_response_with_search(message, model, num_calls=num_calls, temperature=temperature)
                 formatted_response = f"{main_content}\n\nSources:\n{sources}"
             else:
+                print("Using PDF search")  # Debug log
                 response = get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature)
                 formatted_response = response
 
+            print(f"Generated response: {formatted_response[:100]}...")  # Debug log
+
             if not stop_clicked:
-                # Only append the final, combined response to the history
+                print("Appending to history")  # Debug log
                 history.append((message, formatted_response))
+            else:
+                print("Stop clicked, not appending to history")  # Debug log
         except Exception as e:
             print(f"Error generating response: {str(e)}")
            history.append((message, "I'm sorry, but I encountered an error while generating the response. Please try again."))
 
         is_generating = False
+        print(f"Returning history with {len(history)} items")  # Debug log
         return "", history, is_generating, stop_clicked
-
+
     submit_btn.click(
         protected_generate_response,
         inputs=[msg, chatbot, use_web_search, model_dropdown, temperature_slider, num_calls_slider, is_generating, stop_clicked],
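
Review note: inside generate_chunked_response, stop_clicked is read but is neither a parameter nor defined locally, so the name only resolves if a module-level stop_clicked happens to exist; otherwise the first loop iteration raises NameError. Rebinding the local stop_clicked inside protected_generate_response also cannot reach this function. A minimal sketch of one way to share the flag, using threading.Event (the stop_event name and parameter are assumptions, not part of this commit):

    import threading

    stop_event = threading.Event()  # hypothetical shared stop signal, created once at startup

    def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3,
                                  temperature=0.2, stop_event=stop_event):
        full_responses = []
        for i in range(num_calls):
            if stop_event.is_set():  # thread-safe check instead of a bare global lookup
                print("Stop requested, breaking loop")
                break
            # ... stream one API call and append to full_responses, as in app.py ...
        return " ".join(full_responses)

A stop button's handler would call stop_event.set(); the next is_set() check then exits the loop, and stop_event.clear() re-arms it for the following request.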
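
Review note: submit_btn.click passes is_generating and stop_clicked through inputs and returns them through outputs, which only works if they are gr.State components rather than plain Python booleans; otherwise the handler's reassignments are invisible outside the call. A hedged sketch of that wiring, trimmed to the state-related pieces (the stub handler stands in for the real one in app.py):

    import gradio as gr

    def protected_generate_response(message, history, is_generating, stop_clicked):
        # Stub for the app.py handler: ignore re-entrant calls, else append and reset flags.
        if is_generating:
            return message, history, is_generating, stop_clicked
        history = history + [(message, "(response)")]
        return "", history, False, False

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        msg = gr.Textbox()
        submit_btn = gr.Button("Submit")

        # Per-session state; plain module-level booleans would be shared across
        # sessions and would never see the handler's reassignments.
        is_generating = gr.State(False)
        stop_clicked = gr.State(False)

        submit_btn.click(
            protected_generate_response,
            inputs=[msg, chatbot, is_generating, stop_clicked],
            outputs=[msg, chatbot, is_generating, stop_clicked],
        )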