Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -19,6 +19,7 @@ client = OpenAI(
|
|
19 |
#Create supported models
|
20 |
model_links ={
|
21 |
"HAH-2024-v0.1":"drmasad/HAH-2024-v0.11",
|
|
|
22 |
}
|
23 |
|
24 |
model_info ={
|
@@ -26,6 +27,11 @@ model_info ={
|
|
26 |
{'description':"""The HAH-2024-v0.1 model is a **Large Language Model (LLM)** that's able to have question and answer interactions.\n \
|
27 |
\nIt was created by fine tuning Mistral 7b instruct using 3000 review articles on **diabetes** \n""",
|
28 |
'logo':'https://www.hmgaihub.com/untitled.png'},
|
|
|
|
|
|
|
|
|
|
|
29 |
}
|
30 |
|
31 |
def reset_conversation():
|
@@ -88,7 +94,7 @@ for message in st.session_state.messages:
|
|
88 |
st.markdown(message["content"])
|
89 |
|
90 |
# Accept user input
|
91 |
-
if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question on diabetes"):
|
92 |
|
93 |
# Display user message in chat message container
|
94 |
with st.chat_message("user"):
|
@@ -101,8 +107,11 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question on diabe
|
|
101 |
with st.chat_message("assistant"):
|
102 |
stream = client.chat.completions.create(
|
103 |
model=model_links[selected_model],
|
104 |
-
messages=[
|
105 |
-
|
|
|
|
|
|
|
106 |
stream=True,
|
107 |
max_tokens=3000,
|
108 |
)
|
|
|
19 |
#Create supported models
|
20 |
model_links ={
|
21 |
"HAH-2024-v0.1":"drmasad/HAH-2024-v0.11",
|
22 |
+
"Mistral":"mistralai/Mistral-7B-Instruct-v0.2",
|
23 |
}
|
24 |
|
25 |
model_info ={
|
|
|
27 |
{'description':"""The HAH-2024-v0.1 model is a **Large Language Model (LLM)** that's able to have question and answer interactions.\n \
|
28 |
\nIt was created by fine tuning Mistral 7b instruct using 3000 review articles on **diabetes** \n""",
|
29 |
'logo':'https://www.hmgaihub.com/untitled.png'},
|
30 |
+
"Mistral":
|
31 |
+
{'description':
|
32 |
+
"""The Mistral model is a **Large Language Model (LLM)** that's able to have question and answer interactions...""",
|
33 |
+
'logo':
|
34 |
+
'https://mistral.ai/images/logo_hubc88c4ece131b91c7cb753f40e9e1cc5_2589_256x0_resize_q97_h2_lanczos_3.webp'},
|
35 |
}
|
36 |
|
37 |
def reset_conversation():
|
|
|
94 |
st.markdown(message["content"])
|
95 |
|
96 |
# Accept user input
|
97 |
+
if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
|
98 |
|
99 |
# Display user message in chat message container
|
100 |
with st.chat_message("user"):
|
|
|
107 |
with st.chat_message("assistant"):
|
108 |
stream = client.chat.completions.create(
|
109 |
model=model_links[selected_model],
|
110 |
+
messages=[
|
111 |
+
{"role": m["role"], "content": m["content"]}
|
112 |
+
for m in st.session_state.messages
|
113 |
+
],
|
114 |
+
temperature=temp_values,#0.5,
|
115 |
stream=True,
|
116 |
max_tokens=3000,
|
117 |
)
|