srinuksv committed
Commit 954a292 · verified · 1 Parent(s): 19e8df5

Update app.py

Files changed (1)
  1. app.py +86 -63
app.py CHANGED
@@ -4,20 +4,24 @@ import os
 from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
 from llama_index.llms.huggingface import HuggingFaceInferenceAPI
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+from sentence_transformers import SentenceTransformer
 import firebase_admin
 from firebase_admin import db, credentials
 import datetime
 import uuid
 import random
+session_id = str(uuid.uuid4())
+def select_random_name():
+    names = ['Clara', 'Lily']
+    return random.choice(names)
 
+# Example usage
 # Load environment variables
 load_dotenv()
-
-# Initialize Firebase with provided credentials and URL
+# authenticate to firebase
 cred = credentials.Certificate("redfernstech-fd8fe-firebase-adminsdk-g9vcn-0537b4efd6.json")
 firebase_admin.initialize_app(cred, {"databaseURL": "https://redfernstech-fd8fe-default-rtdb.firebaseio.com/"})
-
-# Configure Llama index settings
+# Configure the Llama index settings
 Settings.llm = HuggingFaceInferenceAPI(
     model_name="meta-llama/Meta-Llama-3-8B-Instruct",
     tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
@@ -30,33 +34,30 @@ Settings.embed_model = HuggingFaceEmbedding(
     model_name="BAAI/bge-small-en-v1.5"
 )
 
-# Define directories
+# Define the directory for persistent storage and data
 PERSIST_DIR = "db"
-PDF_DIRECTORY = 'data'
+PDF_DIRECTORY = 'data'  # Changed to the directory containing PDFs
 
 # Ensure directories exist
 os.makedirs(PDF_DIRECTORY, exist_ok=True)
 os.makedirs(PERSIST_DIR, exist_ok=True)
 
-# Dictionary to store chat histories for different sessions
-session_chat_histories = {}
-
-def select_random_name():
-    names = ['Clara', 'Lily']
-    return random.choice(names)
+# Variable to store current chat conversation
+current_chat_history = []
 
 def data_ingestion_from_directory():
+    # Use SimpleDirectoryReader on the directory containing the PDF files
     documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
     storage_context = StorageContext.from_defaults()
    index = VectorStoreIndex.from_documents(documents)
     index.storage_context.persist(persist_dir=PERSIST_DIR)
 
-def handle_query(session_id, query):
+def handle_query(query):
     chat_text_qa_msgs = [
         (
             "user",
             """
-            As Clara, your goal is to provide code to the user.
+            As Clera, your goal is to provide code to the user.
             Your task is to give code to the model and offer guidance on creating a website using Django, HTML, CSS, and Bootstrap.
             {context_str}
             Question:
@@ -66,14 +67,15 @@ def handle_query(session_id, query):
     ]
     text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
 
+    # Load index from storage
     storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
     index = load_index_from_storage(storage_context)
 
+    # Use chat history to enhance response
     context_str = ""
-    if session_id in session_chat_histories:
-        for past_query, response in reversed(session_chat_histories[session_id]):
-            if past_query.strip():
-                context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
+    for past_query, response in reversed(current_chat_history):
+        if past_query.strip():
+            context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
 
     query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
     answer = query_engine.query(query)
@@ -85,62 +87,83 @@ def handle_query(session_id, query):
     else:
         response = "Sorry, I couldn't find an answer."
 
-    if session_id not in session_chat_histories:
-        session_chat_histories[session_id] = []
-
-    session_chat_histories[session_id].append((query, response))
+    # Update current chat history
+    current_chat_history.append((query, response))
     message_data = {
-        "query": query,
-        "response": response,
-        "timestamp": datetime.datetime.now().isoformat()
-    }
+        "query": query,
+        "response": response,
+        "timestamp": datetime.datetime.now().isoformat()  # Use a library like datetime
+    }
 
+    # Call the save function to store in Firebase with the generated session ID
     save_chat_message(session_id, message_data)
+
     return response
 
+# Example usage: Process PDF ingestion from directory
+print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
+data_ingestion_from_directory()
+
+# Define the function to handle predictions
+"""def predict(message, history):
+    response = handle_query(message)
+    return response"""
+def predict(message, history):
+    logo_html = '''
+    <div class="circle-logo">
+        <img src="https://rb.gy/8r06eg" alt="FernAi">
+    </div>
+    '''
+    response = handle_query(message)
+    response_with_logo = f'<div class="response-with-logo">{logo_html}<div class="response-text">{response}</div></div>'
+    return response_with_logo
 def save_chat_message(session_id, message_data):
-    ref = db.reference(f'/chat_history/{session_id}')
+    ref = db.reference(f'/chat_history/{session_id}')  # Use the session ID to save chat data
     ref.push().set(message_data)
 
+# Define your Gradio chat interface function (replace with your actual logic)
 def chat_interface(message, history):
-    # Retrieve or create a new session ID based on history
-    session_id = history[0][1] if history and history[0][1] else str(uuid.uuid4())
-    history.append((message, session_id))  # Append the session ID to history
-    response = handle_query(session_id, message)
-    return response, history
+    try:
+        # Generate a unique session ID for this chat session
+
 
+        # Process the user message and generate a response (your chatbot logic)
+        response = handle_query(message)
+
+        # Return the bot response
+        return response
+    except Exception as e:
+        return str(e)
+
+# Custom CSS for styling
 css = '''
 .circle-logo {
-    display: inline-block;
-    width: 40px;
-    height: 40px;
-    border-radius: 50%;
-    overflow: hidden;
-    margin-right: 10px;
-    vertical-align: middle;
-}
-.circle-logo img {
-    width: 100%;
-    height: 100%;
-    object-fit: cover;
+    display: inline-block;
+    width: 40px;
+    height: 40px;
+    border-radius: 50%;
+    overflow: hidden;
+    margin-right: 10px;
+    vertical-align: middle;
+}
+.circle-logo img {
+    width: 100%;
+    height: 100%;
+    object-fit: cover;
+}
+.response-with-logo {
+    display: flex;
+    align-items: center;
+    margin-bottom: 10px;
+}
+footer {
+    display: none !important;
+    background-color: #F8D7DA;
 }
-.response-with-logo {
-    display: flex;
-    align-items: center;
-    margin-bottom: 10px;
-}
-footer {
-    display: none !important;
-    background-color: #F8D7DA;
-}
-label.svelte-1b6s6s {display: none}
+label.svelte-1b6s6s {display: none}
 '''
-
-# Load data and start Gradio interface
-print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
-data_ingestion_from_directory()
-
-gr.ChatInterface(fn=chat_interface,
-                 css=css,
-                 description="Clara",
-                 clear_btn=None, undo_btn=None, retry_btn=None).launch()
+gr.ChatInterface(chat_interface,
+                 css=css,
+                 description="Clara",
+                 clear_btn=None, undo_btn=None, retry_btn=None,
+                 ).launch()
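
For reference, a minimal sketch (not part of the commit) of how the module-level state introduced here feeds the prompt context: the loop mirrors the lines added to handle_query, while build_context and the sample exchange are illustrative names only.

import uuid

session_id = str(uuid.uuid4())   # generated once at import time, shared by every chat
current_chat_history = []        # (query, response) tuples appended after each reply

def build_context(history):
    # Mirrors the loop added to handle_query: most recent exchanges first.
    context_str = ""
    for past_query, response in reversed(history):
        if past_query.strip():
            context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
    return context_str

# Hypothetical exchange, just to show the resulting context string.
current_chat_history.append(("How do I start a Django project?",
                             "Run `django-admin startproject mysite`."))
print(build_context(current_chat_history))

Because both session_id and current_chat_history live at module level, every user of the deployed Space shares one history and one Firebase session key under /chat_history/{session_id}.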