Update app.py
app.py CHANGED
@@ -435,13 +435,12 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
     logging.info(f"Model Used: {model}")
     logging.info(f"Selected Documents: {selected_docs}")
     logging.info(f"Use Web Search: {use_web_search}")
-
+
     if use_web_search:
         original_query = message
         rephrased_query = rephrase_query(message, conversation_manager)
         logging.info(f"Original query: {original_query}")
         logging.info(f"Rephrased query: {rephrased_query}")
-
         final_summary = ""
         for _ in range(num_calls):
             search_results = get_web_search_results(rephrased_query)
@@ -451,8 +450,7 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
                 final_summary += search_results[0]["error"] + "\n\n"
             else:
                 summary = summarize_web_results(rephrased_query, search_results, conversation_manager)
-                final_summary += summary + "\n\n"
-
+                final_summary += str(summary) + "\n\n"
         if final_summary:
             conversation_manager.add_interaction(original_query, final_summary)
             yield final_summary
@@ -483,21 +481,21 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
         if model.startswith("duckduckgo/"):
             # Use DuckDuckGo chat with context
             for partial_response in get_response_from_duckduckgo(message, model, context_str, num_calls, temperature):
-                yield partial_response
+                yield str(partial_response)
         elif model == "@cf/meta/llama-3.1-8b-instruct":
             # Use Cloudflare API
             for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
-                yield partial_response
+                yield str(partial_response)
         else:
             # Use Hugging Face API
             for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
-                yield partial_response
+                yield str(partial_response)
     except Exception as e:
         logging.error(f"Error with {model}: {str(e)}")
         if "microsoft/Phi-3-mini-4k-instruct" in model:
             logging.info("Falling back to Mistral model due to Phi-3 error")
             fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
-            yield from respond(message, history, fallback_model, temperature, num_calls, selected_docs)
+            yield from (str(response) for response in respond(message, history, fallback_model, temperature, num_calls, selected_docs))
         else:
             yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
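The thread running through this commit is type hardening on the streaming path: every chunk handed back to the UI is now wrapped in str() (including the summary accumulated from web search), and the Phi-3 fallback re-enters respond() through a generator expression that applies the same coercion. Below is a minimal sketch of that pattern, assuming a hypothetical upstream generator; flaky_backend, respond_safely, and respond_with_fallback are illustrations, not functions from this app.

# Sketch only: str()-coercing a streamed response, as this commit does for
# get_response_from_duckduckgo / get_response_from_cloudflare / get_response_from_pdf.

def flaky_backend():
    # Hypothetical upstream API that does not always yield strings.
    yield "partial answer "
    yield None                       # e.g. an empty keep-alive chunk
    yield {"error": "rate limited"}  # or a structured error payload

def respond_safely():
    # Mirrors `yield str(partial_response)`: the consumer always gets strings.
    for chunk in flaky_backend():
        yield str(chunk)

def respond_with_fallback():
    # Mirrors the fallback line: delegating through a generator expression
    # keeps the str() guarantee on the re-entrant call.
    yield from (str(chunk) for chunk in respond_safely())

for piece in respond_with_fallback():
    assert isinstance(piece, str)

Note that str(None) produces the literal text "None", so this guards the streaming layer against type errors rather than cleaning up the content itself; filtering out falsy chunks before coercion would be a stricter variant.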