Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -2,33 +2,17 @@ import os
|
|
2 |
import gradio as gr
|
3 |
from rag import generate_answer
|
4 |
|
5 |
-
def chat_with_bot(query: str) ->
|
6 |
"""
|
7 |
-
Chat with the bot using the provided query
|
8 |
"""
|
9 |
try:
|
10 |
response = generate_answer(query) # Ensure this function is working correctly
|
11 |
except Exception as e:
|
12 |
print(f"Error in generate_answer: {str(e)}") # More specific error handling
|
13 |
-
return "Error generating answer" # Return a default response on error
|
14 |
|
15 |
-
|
16 |
-
formatted_response = f"""
|
17 |
-
**Summary:**
|
18 |
-
{response.get("summary", "No summary available.")}
|
19 |
-
|
20 |
-
**Details:**
|
21 |
-
{response.get("details", "No details available.")}
|
22 |
-
|
23 |
-
**Cautions:**
|
24 |
-
{response.get("cautions", "No cautions available.")}
|
25 |
-
|
26 |
-
**Disclaimer:**
|
27 |
-
{response.get("disclaimer", "No disclaimer available.")}
|
28 |
-
|
29 |
-
If you have any further questions or need more details, feel free to ask!
|
30 |
-
"""
|
31 |
-
return formatted_response
|
32 |
|
33 |
def main():
|
34 |
"""
|
@@ -38,7 +22,7 @@ def main():
|
|
38 |
interface = gr.Interface(
|
39 |
fn=chat_with_bot,
|
40 |
inputs="text",
|
41 |
-
outputs="
|
42 |
title="RAG-LLM based Medical Chatbot",
|
43 |
description="Ask your medical questions and get answers from the chatbot."
|
44 |
)
|
@@ -48,4 +32,4 @@ def main():
|
|
48 |
print(f"An error occurred while launching the interface: {str(e)}") # More context on errors
|
49 |
|
50 |
if __name__ == "__main__":
|
51 |
-
main()
|
|
|
2 |
import gradio as gr
|
3 |
from rag import generate_answer
|
4 |
|
5 |
+
def chat_with_bot(query: str) -> str:
    """Answer a user query via the RAG backend for display in the Gradio UI.

    Args:
        query: The user's question, forwarded from the Gradio text input.

    Returns:
        The answer produced by ``generate_answer``, or a plain error
        message string when answer generation fails.
    """
    try:
        # generate_answer comes from the project's rag module; its exact
        # return type is not visible here — presumably text-like, since the
        # interface renders it in a single "text" output. TODO confirm.
        response = generate_answer(query)
    except Exception as e:
        # Log and degrade gracefully instead of crashing the UI.
        print(f"Error in generate_answer: {str(e)}")
        # FIX: previously returned ("Error generating answer", None) — a
        # 2-tuple, which a single-output gr.Interface (outputs="text")
        # would misinterpret as values for two output components.
        return "Error generating answer"

    return response
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
|
17 |
def main():
|
18 |
"""
|
|
|
22 |
interface = gr.Interface(
|
23 |
fn=chat_with_bot,
|
24 |
inputs="text",
|
25 |
+
outputs="text",
|
26 |
title="RAG-LLM based Medical Chatbot",
|
27 |
description="Ask your medical questions and get answers from the chatbot."
|
28 |
)
|
|
|
32 |
print(f"An error occurred while launching the interface: {str(e)}") # More context on errors
|
33 |
|
34 |
if __name__ == "__main__":
|
35 |
+
main()
|