Haseeb-001 committed
Commit c9adc18 · verified · 1 parent: dd41d69

Create app.py

Files changed (1)
  1. app.py +75 -0
app.py ADDED
@@ -0,0 +1,75 @@
+import os
+import streamlit as st
+import faiss
+import pickle
+from groq import Groq
+from datasets import load_dataset
+from transformers import pipeline
+
+# Initialize Groq API client (expects GROQ_API_KEY in the environment)
+client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
+
+# Load domain datasets
+healthcare_ds = load_dataset("harishnair04/mtsamples")
+education_ds = load_dataset("ehovy/race", "all")
+finance_ds = load_dataset("warwickai/financial_phrasebank_mirror")
+
+# Load local chat model (initialized here; responses below are generated via Groq)
+chat_pipe = pipeline("text-generation", model="rajkumarrrk/dialogpt-fine-tuned-on-daily-dialog")
+
+# FAISS index for 768-dim embeddings (placeholder; not yet populated or queried)
+index = faiss.IndexFlatL2(768)
+
+# Streamlit UI Setup
+st.set_page_config(page_title="AI Chatbot", layout="wide")
+st.title("🤖 AI Chatbot (Healthcare, Education & Finance)")
+if "chat_history" not in st.session_state:
+    st.session_state.chat_history = []  # keep history across Streamlit reruns
+
+# Sidebar for chat history
+st.sidebar.title("📜 Chat History")
+if st.sidebar.button("Download Chat History"):
+    with open("chat_history.txt", "w") as file:
+        file.write("\n".join(st.session_state.chat_history))
+    st.sidebar.success("Chat history saved!")
+
+# Chat Interface
+user_input = st.text_input("💬 Ask me anything:", placeholder="Type your query here...")
+if st.button("Send"):
+    if user_input:
+        # Pick a dataset based on keywords in the query (basic CAG routing)
+        dataset = healthcare_ds if "health" in user_input.lower() else \
+                  education_ds if "education" in user_input.lower() else \
+                  finance_ds
+
+        # RAG: retrieve relevant data (simplified: always the first training example)
+        retrieved_data = dataset["train"][0]
+
+        # Generate a response using Llama via the Groq API
+        chat_completion = client.chat.completions.create(
+            messages=[{"role": "user", "content": f"{user_input} {retrieved_data}"}],
+            model="llama-3.3-70b-versatile"
+        )
+        response = chat_completion.choices[0].message.content
+
+        # Record the exchange and display the response
+        st.session_state.chat_history.append(f"User: {user_input}\nBot: {response}")
+        st.text_area("🤖 AI Response:", value=response, height=200)
+
+        # Display past chats
+        st.sidebar.write("\n".join(st.session_state.chat_history))
+
+# Save chat history using pickle for persistence
+def save_chat_history():
+    with open("chat_history.pkl", "wb") as file:
+        pickle.dump(st.session_state.chat_history, file)
+
+def load_chat_history():
+    if os.path.exists("chat_history.pkl") and not st.session_state.chat_history:  # load once per session
+        with open("chat_history.pkl", "rb") as file:
+            st.session_state.chat_history = pickle.load(file)
+
+load_chat_history()
+if st.sidebar.button("Save Chat History"):
+    save_chat_history()
+    st.sidebar.success("Chat history saved permanently!")
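
Note on the retrieval step: the FAISS index in app.py is created but never populated or searched, and retrieval always returns the first training example. Below is a minimal sketch of how embedding-based retrieval could back that step, assuming sentence-transformers is available; the model name and helper functions are illustrative and not part of the commit.

import faiss
from sentence_transformers import SentenceTransformer

# Illustrative 768-dim embedder to match IndexFlatL2(768) above (assumption, not from the commit)
embedder = SentenceTransformer("sentence-transformers/all-mpnet-base-v2")
index = faiss.IndexFlatL2(768)
corpus = []  # document texts, aligned with the index row order

def add_documents(texts):
    # Embed documents and append their vectors to the FAISS index
    vectors = embedder.encode(texts).astype("float32")
    index.add(vectors)
    corpus.extend(texts)

def retrieve(query, k=3):
    # Embed the query and return the k nearest documents
    query_vec = embedder.encode([query]).astype("float32")
    _, ids = index.search(query_vec, k)
    return [corpus[i] for i in ids[0] if i != -1]

With something like this, retrieved_data could come from retrieve(user_input) over texts drawn from the selected dataset rather than dataset["train"][0].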