Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -1,3 +1,10 @@
+"""
+Diabetes Version
+@aim: Demo for testing purposes only
+@inquiries: Dr M As'ad
+@email: [email protected]
+"""
+
 # Import necessary libraries
 import streamlit as st
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, pipeline
@@ -41,9 +48,13 @@ st.sidebar.write(f"You're now chatting with **{selected_model}**")
 st.sidebar.image("https://www.hmgaihub.com/untitled.png")
 st.sidebar.markdown("*Generated content may be inaccurate or false.*")
 st.sidebar.markdown("*This is an under development project.*")
+st.sidebar.markdown("*Not a replacement for medical advice from a doctor.*")
+
+# Loading message placeholder
+loading_message = st.empty()
 
 def load_model(selected_model_name):
-
+    loading_message.info("Loading the model, please wait...")
     model_name = model_links[selected_model_name]
 
     # Define the BitsAndBytesConfig for quantization
@@ -81,13 +92,12 @@ def load_model(selected_model_name):
         "mistralai/Mistral-7B-Instruct-v0.2", trust_remote_code=True
     )
 
-
+    # Clear the loading message
+    loading_message.success("Model is ready. Now we are ready!")
 
     return model, tokenizer
 
 
-
-
 # Load model and tokenizer
 model, tokenizer = load_model(selected_model)
 
@@ -106,11 +116,10 @@ if prompt := st.chat_input("Ask me anything about diabetes"):
     st.session_state.messages.append({"role": "user", "content": prompt})
 
     instructions = """
-    Act as a highly knowledgeable
-    you will answer only what the need and in professional way. do not add extra unnecessary information. you can however chat with the patient casually
+    Act as a highly knowledgeable doctor with special interest in diabetes, skilled at explaining complex medical information in a way that is easy to understand for patients without a medical background. Your responses should not only demonstrate empathy and care but also uphold a high standard of medical accuracy and reliability. Respond precisely to what the patient needs in a professional, accurate, and reassuring manner, avoiding any unnecessary information.
     """
 
-    full_prompt = f"<s>[INST] {
+    full_prompt = f"<s>[INST] {prompt} [/INST] {instructions}</s>"
 
     with st.chat_message("assistant"):
         result = pipeline(
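
The hunks above only show the edges of load_model: the quantization comment at the top and the tokenizer call at the bottom. For readers following along, here is a minimal sketch of how such a 4-bit quantized load typically fits together with the new loading_message placeholder. The BitsAndBytesConfig values, the model_links mapping, and the device_map argument are assumptions for illustration, not taken from this commit.

# Sketch of a quantized load_model(); quantization settings and the
# model_links mapping are assumed, not shown in the diff hunks above.
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_links = {
    "Mistral-7B-Instruct-v0.2": "mistralai/Mistral-7B-Instruct-v0.2",  # assumed mapping
}

# Placeholder that the load function can overwrite with status messages
loading_message = st.empty()

def load_model(selected_model_name):
    loading_message.info("Loading the model, please wait...")
    model_name = model_links[selected_model_name]

    # Assumed 4-bit NF4 quantization config; the commit only shows the comment above it.
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_quant_type="nf4",
        bnb_4bit_compute_dtype=torch.bfloat16,
    )

    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        quantization_config=bnb_config,
        device_map="auto",           # assumed; places layers on available devices
        trust_remote_code=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(
        "mistralai/Mistral-7B-Instruct-v0.2", trust_remote_code=True
    )

    # Clear the loading message
    loading_message.success("Model is ready. Now we are ready!")
    return model, tokenizer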
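The last hunk shows the new [INST] prompt template and only the first line of the pipeline(...) call, not the generation arguments or how the reply is rendered. Below is a sketch of one complete chat turn, continuing from the load_model sketch above and assuming a transformers text-generation pipeline with illustrative generation settings (max_new_tokens, temperature, return_full_text) that are not part of the commit.

# Sketch of one chat turn; `model` and `tokenizer` come from load_model()
# in the sketch above. The pipeline construction and generation arguments
# are assumptions for illustration.
from transformers import pipeline as hf_pipeline

pipe = hf_pipeline("text-generation", model=model, tokenizer=tokenizer)

if "messages" not in st.session_state:
    st.session_state.messages = []

if prompt := st.chat_input("Ask me anything about diabetes"):
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    instructions = """
    Act as a highly knowledgeable doctor with special interest in diabetes...
    """

    # Mistral-style instruction wrapper used in the commit:
    # the user prompt goes inside [INST] ... [/INST].
    full_prompt = f"<s>[INST] {prompt} [/INST] {instructions}</s>"

    with st.chat_message("assistant"):
        result = pipe(
            full_prompt,
            max_new_tokens=512,      # assumed generation settings
            do_sample=True,
            temperature=0.7,
            return_full_text=False,  # return only the newly generated text
        )
        answer = result[0]["generated_text"]
        st.markdown(answer)
    st.session_state.messages.append({"role": "assistant", "content": answer})

Note that the commit places the instructions after [/INST] rather than inside the [INST] block alongside the user message; the sketch mirrors the commit rather than the more common Mistral chat-template ordering.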