drmasad commited on
Commit
23624f5
·
verified ·
1 Parent(s): 7f942f9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +114 -0
app.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ HAH 2024 v0.1 Chatbot
2
+ @author: Dr M As'ad
3
+ @email: [email protected]
4
+ """
5
+
6
+ import streamlit as st
7
+ from openai import OpenAI
8
+ import os
9
+ import sys
10
+ from dotenv import load_dotenv, dotenv_values
11
+ load_dotenv()
12
+
# Configure an OpenAI-compatible client against the Hugging Face
# serverless inference endpoint; the API token is read from the
# environment (populated above by load_dotenv()).
client = OpenAI(
    api_key=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    base_url="https://api-inference.huggingface.co/v1",
)
# Supported models: display name -> Hugging Face repo id.
model_links = {
    "HAH-2024-v0.1": "drmasad/HAH-2024-v0.11",
}

# Per-model sidebar content: a markdown description and a logo URL.
# Fixed typos ("Mistra" -> "Mistral", "usin" -> "using") and removed a
# stray unmatched "**" that broke the markdown rendering.
model_info = {
    "HAH-2024-v0.1": {
        'description': """The HAH-2024-v0.1 model is a **Large Language Model (LLM)** that's able to have question and answer interactions.\n \
        \nIt was created by fine tuning Mistral 7B Instruct using 3000 review articles on diabetes.\n""",
        'logo': 'https://www.hmgaihub.com/untitled.png',
    },
}
30
+
def reset_conversation():
    """Wipe the chat state: both the message history and the conversation log."""
    st.session_state.messages = []
    st.session_state.conversation = []
38
+
39
+ # Define the available models
40
+ models =[key for key in model_links.keys()]
41
+
42
+ # Create the sidebar with the dropdown for model selection
43
+ selected_model = st.sidebar.selectbox("Select Model", models)
44
+
45
+ #Create a temperature slider
46
+ temp_values = st.sidebar.slider('Select a temperature value', 0.0, 1.0, (0.5))
47
+
48
+
49
+ #Add reset button to clear conversation
50
+ st.sidebar.button('Reset Chat', on_click=reset_conversation) #Reset button
51
+
52
+
53
+ # Create model description
54
+ st.sidebar.write(f"You're now chatting with **{selected_model}**")
55
+ st.sidebar.markdown(model_info[selected_model]['description'])
56
+ st.sidebar.image(model_info[selected_model]['logo'])
57
+ st.sidebar.markdown("*Generated content may be inaccurate or false.*")
58
+
59
+
60
+ if "prev_option" not in st.session_state:
61
+ st.session_state.prev_option = selected_model
62
+
63
+ if st.session_state.prev_option != selected_model:
64
+ st.session_state.messages = []
65
+ # st.write(f"Changed to {selected_model}")
66
+ st.session_state.prev_option = selected_model
67
+ reset_conversation()
68
+
69
+ #Pull in the model we want to use
70
+ repo_id = model_links[selected_model]
71
+
72
+
73
+ st.subheader(f'AI - {selected_model}')
74
+ # st.title(f'ChatBot Using {selected_model}')
75
+
76
+ # Set a default model
77
+ if selected_model not in st.session_state:
78
+ st.session_state[selected_model] = model_links[selected_model]
79
+
80
+ # Initialize chat history
81
+ if "messages" not in st.session_state:
82
+ st.session_state.messages = []
83
+
84
+
85
+ # Display chat messages from history on app rerun
86
+ for message in st.session_state.messages:
87
+ with st.chat_message(message["role"]):
88
+ st.markdown(message["content"])
89
+
90
+ # Accept user input
91
+ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question on diabetes"):
92
+
93
+ # Display user message in chat message container
94
+ with st.chat_message("user"):
95
+ st.markdown(prompt)
96
+ # Add user message to chat history
97
+ st.session_state.messages.append({"role": "user", "content": prompt})
98
+
99
+
100
+ # Display assistant response in chat message container
101
+ with st.chat_message("assistant"):
102
+ stream = client.chat.completions.create(
103
+ model=model_links[selected_model],
104
+ messages=[
105
+ {"role": m["role"], "content": m["content"]}
106
+ for m in st.session_state.messages
107
+ ],
108
+ temperature=temp_values,#0.5,
109
+ stream=True,
110
+ max_tokens=3000,
111
+ )
112
+
113
+ response = st.write_stream(stream)
114
+ st.session_state.messages.append({"role": "assistant", "content": response})