Shreyas094 committed on
Commit
8df21a0
·
verified ·
1 Parent(s): 5b2111b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -26
app.py CHANGED
@@ -404,6 +404,8 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
404
  logging.info(f"Selected Documents: {selected_docs}")
405
  logging.info(f"Use Web Search: {use_web_search}")
406
 
 
 
407
  if use_web_search:
408
  original_query = message
409
  rephrased_query = rephrase_query(message, conversation_manager)
@@ -423,9 +425,10 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
423
 
424
  if final_summary:
425
  conversation_manager.add_interaction(original_query, final_summary)
426
- yield final_summary
427
  else:
428
- yield "Unable to generate a response. Please try a different query."
 
429
  else:
430
  # Existing PDF search logic
431
  try:
@@ -438,37 +441,41 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
438
  relevant_docs = [doc for doc in all_relevant_docs if doc.metadata["source"] in selected_docs]
439
 
440
  if not relevant_docs:
441
- yield "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
442
- return
443
-
444
- context_str = "\n".join([doc.page_content for doc in relevant_docs])
445
- logging.info(f"Context length: {len(context_str)}")
446
- else:
447
- context_str = "No documents available."
448
- yield "No documents available. Please upload PDF documents to answer questions."
449
- return
450
-
451
- if model.startswith("duckduckgo/"):
452
- # Use DuckDuckGo chat with context
453
- for partial_response in get_response_from_duckduckgo(message, model, context_str, num_calls, temperature):
454
- yield partial_response
455
- elif model == "@cf/meta/llama-3.1-8b-instruct":
456
- # Use Cloudflare API
457
- for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
458
- yield partial_response
459
  else:
460
- # Use Hugging Face API
461
- for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
462
- yield partial_response
463
  except Exception as e:
464
  logging.error(f"Error with {model}: {str(e)}")
465
  if "microsoft/Phi-3-mini-4k-instruct" in model:
466
  logging.info("Falling back to Mistral model due to Phi-3 error")
467
  fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
468
- yield from respond(message, history, fallback_model, temperature, num_calls, selected_docs)
469
  else:
470
- yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
471
-
 
 
 
 
 
 
472
  logging.basicConfig(level=logging.DEBUG)
473
 
474
  def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
 
404
  logging.info(f"Selected Documents: {selected_docs}")
405
  logging.info(f"Use Web Search: {use_web_search}")
406
 
407
+ response = ""
408
+
409
  if use_web_search:
410
  original_query = message
411
  rephrased_query = rephrase_query(message, conversation_manager)
 
425
 
426
  if final_summary:
427
  conversation_manager.add_interaction(original_query, final_summary)
428
+ response = final_summary
429
  else:
430
+ response = "Unable to generate a response. Please try a different query."
431
+
432
  else:
433
  # Existing PDF search logic
434
  try:
 
441
  relevant_docs = [doc for doc in all_relevant_docs if doc.metadata["source"] in selected_docs]
442
 
443
  if not relevant_docs:
444
+ response = "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
445
+ else:
446
+ context_str = "\n".join([doc.page_content for doc in relevant_docs])
447
+ logging.info(f"Context length: {len(context_str)}")
448
+
449
+ if model.startswith("duckduckgo/"):
450
+ # Use DuckDuckGo chat with context
451
+ for partial_response in get_response_from_duckduckgo(message, model, context_str, num_calls, temperature):
452
+ response += partial_response
453
+ elif model == "@cf/meta/llama-3.1-8b-instruct":
454
+ # Use Cloudflare API
455
+ for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
456
+ response += partial_response
457
+ else:
458
+ # Use Hugging Face API
459
+ for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
460
+ response += partial_response
 
461
  else:
462
+ response = "No documents available. Please upload PDF documents to answer questions."
463
+
 
464
  except Exception as e:
465
  logging.error(f"Error with {model}: {str(e)}")
466
  if "microsoft/Phi-3-mini-4k-instruct" in model:
467
  logging.info("Falling back to Mistral model due to Phi-3 error")
468
  fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
469
+ return respond(message, history, fallback_model, temperature, num_calls, use_web_search, selected_docs)
470
  else:
471
+ response = f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
472
+
473
+ # Update the conversation history
474
+ history.append((message, response))
475
+
476
+ # Yield the updated history
477
+ yield history
478
+
479
  logging.basicConfig(level=logging.DEBUG)
480
 
481
  def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):