Update app.py

app.py CHANGED
@@ -26,11 +26,13 @@ def askme(symptoms, question):
     prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
     inputs = tokenizer(prompt, return_tensors="pt").to(device)  # Ensure inputs are on CUDA device
     outputs = model.generate(**inputs, max_new_tokens=200, use_cache=True)
-    response_text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0].strip()
-    # Remove system messages and content
-
-
-
+    response_text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0].strip()
+    # Remove system messages and content
+    # Extract and return the generated text, removing the prompt
+    # Extract only the assistant's response
+    answer = response_text.split('<|im_start|>assistant')[-1].strip()
+    #answer =response_text.split("assistant")[1].strip().split("user")[0].strip()
+    return answer
 
 # Example usage
 symptoms = '''\
@@ -87,8 +89,8 @@ css = """
 }
 """
 
-welcome_message = """# AI Medical Chatbot
-Ask any medical question and get answers from our AI Medical Chatbot
+welcome_message = """# AI Medical Llama 3 Chatbot
+Ask any medical question giving first your symptoms and get answers from our AI Medical Llama3 Chatbot
 Developed by Ruslan Magana. Visit [https://ruslanmv.com/](https://ruslanmv.com/) for more information."""
 
 
@@ -106,9 +108,6 @@ with gr.Blocks(css=css) as interface:
 
 interface.launch()
 
-
-
-
 '''
 iface = gr.Interface(
     fn=askme,