Shreyas094 committed
Commit 352e558 · verified · 1 Parent(s): 774efea

Update app.py

Files changed (1): app.py +18 -8
app.py CHANGED
@@ -219,10 +219,10 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
     logging.info(f"Model Used: {model}")
     logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
 
-    try:
+    try:
         if use_web_search:
             for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
-                response = f"{main_content}\n\n{sources}"
+                response = f"{main_content}\n\n{sources}" if sources else main_content
                 first_line = response.split('\n')[0] if response else ''
                 logging.info(f"Generated Response (first line): {first_line}")
                 yield response
@@ -299,19 +299,29 @@ After writing the document, please provide a list of sources used in your response
                     if 'response' in json_response:
                         chunk = json_response['response']
                         full_response += chunk
-                        yield full_response
+                        # Attempt to split the response into main content and sources
+                        main_content, sources = split_content_and_sources(full_response)
+                        yield main_content, sources
                 except (json.JSONDecodeError, IndexError) as e:
                     logging.error(f"Error parsing streaming response: {str(e)}")
                     continue
         else:
             logging.error(f"HTTP Error: {response.status_code}, Response: {response.text}")
-            yield f"I apologize, but I encountered an HTTP error: {response.status_code}. Please try again later."
+            yield f"I apologize, but I encountered an HTTP error: {response.status_code}. Please try again later.", ""
     except Exception as e:
         logging.error(f"Error in generating response from Cloudflare: {str(e)}")
-        yield f"I apologize, but an error occurred: {str(e)}. Please try again later."
+        yield f"I apologize, but an error occurred: {str(e)}. Please try again later.", ""
 
     if not full_response:
-        yield "I apologize, but I couldn't generate a response at this time. Please try again later."
+        yield "I apologize, but I couldn't generate a response at this time. Please try again later.", ""
+
+def split_content_and_sources(text):
+    # Attempt to split the text into main content and sources
+    parts = text.split("Sources:", 1)
+    if len(parts) > 1:
+        return parts[0].strip(), "Sources:" + parts[1]
+    else:
+        return text, ""
 
 def get_response_with_search(query, model, num_calls=3, temperature=0.2):
     search_results = duckduckgo_search(query)
@@ -325,8 +335,8 @@ After writing the document, please provide a list of sources used in your response
 
     if model == "@cf/meta/llama-3.1-8b-instruct":
         # Use Cloudflare API
-        for response in get_response_from_cloudflare(prompt="", context=context, query=query, num_calls=num_calls, temperature=temperature, search_type="web"):
-            yield response, ""  # Yield streaming response without sources
+        for main_content, sources in get_response_from_cloudflare(prompt="", context=context, query=query, num_calls=num_calls, temperature=temperature, search_type="web"):
+            yield main_content, sources
     else:
         # Use Hugging Face API
         client = InferenceClient(model, token=huggingface_token)
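
For reference, here is the split_content_and_sources helper introduced by this commit as a standalone sketch with a small demonstration. The function body is copied from the diff; the sample strings are invented for illustration only.

def split_content_and_sources(text):
    # Split on the first "Sources:" marker; everything before it is the answer body.
    parts = text.split("Sources:", 1)
    if len(parts) > 1:
        return parts[0].strip(), "Sources:" + parts[1]
    else:
        return text, ""

# Illustrative input: an answer followed by a "Sources:" block.
streamed = "Paris is the capital of France.\n\nSources:\n1. https://example.com"
main_content, sources = split_content_and_sources(streamed)
print(repr(main_content))  # 'Paris is the capital of France.'
print(repr(sources))       # 'Sources:\n1. https://example.com'

# A partial stream that has not reached the marker yet takes the else branch:
print(split_content_and_sources("Paris is"))  # ('Paris is', '')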
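And a minimal sketch of how a caller can consume the (main_content, sources) pairs the generators now yield, mirroring the updated formatting logic in respond. The signature of get_response_with_search is taken from the diff; the query and model strings are placeholder values.

# Consume the streaming generator; sources is "" for intermediate chunks.
for main_content, sources in get_response_with_search(
    "example query",
    "@cf/meta/llama-3.1-8b-instruct",
    num_calls=3,
    temperature=0.2,
):
    # Same guard the commit adds in respond(): only append the sources block
    # once it is non-empty, so partial chunks render without a dangling gap.
    response = f"{main_content}\n\n{sources}" if sources else main_content
    print(response)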