devfire committed on
Commit 39e60f4 · verified · 1 Parent(s): 173a4b5

Update app.py

Files changed (1)
  1. app.py +19 -9
app.py CHANGED
@@ -1,23 +1,33 @@
 import os
 import streamlit as st
+import torch
 from groq import Groq
 from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 
 # Set up the Groq API Key
-GROQ_API_KEY = "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"
+GROQ_API_KEY = "your_groq_api_key_here"  # Replace with your actual key
 os.environ["GROQ_API_KEY"] = GROQ_API_KEY
 
 # Initialize the Groq client
 client = Groq(api_key=GROQ_API_KEY)
 
-# Initialize Hugging Face DeepSeek R1 model
-pipe = pipeline("text-generation", model="deepseek-ai/DeepSeek-R1", trust_remote_code=True)
+# Initialize Hugging Face DeepSeek R1 model correctly
+MODEL_NAME = "deepseek-ai/DeepSeek-R1"
 
-def generate_response_hf(user_message):
-    response = pipe(user_message, max_length=200, do_sample=True)
-    return response[0]['generated_text']
+try:
+    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)
+    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, trust_remote_code=True, torch_dtype=torch.float16, device_map="auto")
 
-# Streamlit user interface setup
+    def generate_response_hf(user_message):
+        inputs = tokenizer(user_message, return_tensors="pt").to("cuda" if torch.cuda.is_available() else "cpu")
+        outputs = model.generate(**inputs, max_length=200)
+        return tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+except Exception as e:
+    st.error(f"Error loading DeepSeek-R1: {str(e)}")
+    generate_response_hf = lambda x: "Error: Model not loaded."
+
+# Streamlit UI setup
 st.set_page_config(page_title="AI Study Assistant", page_icon="🤖", layout="wide")
 st.title("📚 Subject-specific AI Chatbot")
 st.write("Hello! I'm your AI Study Assistant. You can ask me any questions related to your subjects, and I'll try to help.")
@@ -31,7 +41,7 @@ chat_model = st.sidebar.radio("Choose AI Model:", ["Groq API", "DeepSeek R1 (Hug
 if 'conversation_history' not in st.session_state:
     st.session_state.conversation_history = []
 
-# Define a list of subjects for which the chatbot will answer
+# Define subjects
 subjects = ["Chemistry", "Computer", "English", "Islamiat", "Mathematics", "Physics", "Urdu"]
 
 def generate_chatbot_response(user_message):
@@ -51,7 +61,7 @@ def generate_chatbot_response(user_message):
     else:
         return generate_response_hf(prompt)
 
-# User input for conversation
+# User input
 st.markdown("### 💬 Chat with me")
 user_input = st.chat_input("Ask me a subject-related question:")
 
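Side note on the credential change above: swapping the leaked key for a placeholder still leaves a spot in app.py where a real key would be pasted. A minimal sketch of reading the key from the environment instead, reusing the GROQ_API_KEY variable name this commit already exports via os.environ; the missing-key guard and error message are illustrative, not part of this commit:

import os
import streamlit as st
from groq import Groq

# Read the key from the environment rather than hardcoding it in app.py.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")

if not GROQ_API_KEY:
    # Hypothetical guard: stop the app with a visible error if the key is missing.
    st.error("GROQ_API_KEY is not set; export it before launching the app.")
    st.stop()

client = Groq(api_key=GROQ_API_KEY)

On hosts that support repository secrets (Hugging Face Spaces, for example), the same variable can usually be supplied there, so no key needs to appear in the commit history.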
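The try/except around the DeepSeek-R1 load is worth having because the full checkpoint is far larger than most single-GPU or CPU-only hosts can hold. One possible extension, not part of this commit, is to fall back to a smaller distilled checkpoint using the same AutoTokenizer/AutoModelForCausalLM pattern; the checkpoint name below is an assumption chosen for illustration:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical fallback checkpoint (assumed name, picked for its small memory footprint).
FALLBACK_MODEL = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"

device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32

tokenizer = AutoTokenizer.from_pretrained(FALLBACK_MODEL)
model = AutoModelForCausalLM.from_pretrained(FALLBACK_MODEL, torch_dtype=dtype).to(device)

def generate_response_hf(user_message):
    # Same interface as the function defined in the diff, so callers need no changes.
    inputs = tokenizer(user_message, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)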