amitpandey8 committed
Commit 84bfc91 · verified · 1 Parent(s): 9dc2e63

Create app.py

Files changed (1)
app.py +34 -0
app.py ADDED
@@ -0,0 +1,34 @@
+ import streamlit as st
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the model and tokenizer once at import time
+ model_name = "GRMenon/mental-health-mistral-7b-instructv0.2-finetuned-V2"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def get_bot_response(user_input):
+     # Tokenize the user input
+     inputs = tokenizer.encode(user_input, return_tensors="pt")
+
+     # Generate a response, capping the number of newly generated tokens
+     bot_response = model.generate(inputs, max_new_tokens=100, num_return_sequences=1)
+
+     # Decode only the newly generated tokens, skipping the echoed prompt
+     return tokenizer.decode(bot_response[0][inputs.shape[-1]:], skip_special_tokens=True)
+
+ def main():
+     st.title("Mental Health Chatbot")
+
+     st.write("Welcome to the mental health chatbot. You can ask questions or share your thoughts, and I'll try to provide helpful responses.")
+
+     user_input = st.text_input("Type your message here:", value="", key="user_input")
+
+     if user_input:
+         st.write(f"You: {user_input}")
+
+         bot_response = get_bot_response(user_input)
+
+         st.write(f"Chatbot: {bot_response}")
+
+ if __name__ == "__main__":
+     main()
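
Possible follow-up (not part of this commit): because Streamlit reruns the whole script on every interaction, the 7B model above is reloaded each time. A minimal sketch of a cached variant is below, assuming the checkpoint's tokenizer ships a Mistral-style chat template; the helper name load_model and the generation settings are illustrative, not part of the committed app.

# Sketch only: cache the weights across reruns and format the prompt
# with the tokenizer's chat template (assumption: the fine-tuned
# checkpoint defines one, as Mistral-instruct tokenizers do).
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "GRMenon/mental-health-mistral-7b-instructv0.2-finetuned-V2"

@st.cache_resource  # load the weights once per process instead of on every rerun
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
    return tokenizer, model

def get_bot_response(user_input: str) -> str:
    tokenizer, model = load_model()
    # Wrap the user message in the model's expected chat format
    inputs = tokenizer.apply_chat_template(
        [{"role": "user", "content": user_input}],
        add_generation_prompt=True,
        return_tensors="pt",
    )
    with torch.no_grad():
        output = model.generate(inputs, max_new_tokens=100)
    # Return only the newly generated tokens, not the echoed prompt
    return tokenizer.decode(output[0][inputs.shape[-1]:], skip_special_tokens=True)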