Srinivasulu Kethanaboina committed on
Commit
d18ba55
·
verified ·
1 Parent(s): 2405bb9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -25
app.py CHANGED
@@ -4,21 +4,23 @@ import os
4
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
5
  from llama_index.llms.huggingface import HuggingFaceInferenceAPI
6
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
- import random
8
- import uuid
 
9
  import datetime
10
- from gradio_client import Client
11
-
12
- # Initialize Gradio Client
13
- client = Client("srinukethanaboina/SRUNU")
14
 
15
  def select_random_name():
16
  names = ['Clara', 'Lily']
17
  return random.choice(names)
18
 
 
19
  # Load environment variables
20
  load_dotenv()
21
-
 
 
22
  # Configure the Llama index settings
23
  Settings.llm = HuggingFaceInferenceAPI(
24
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
@@ -34,7 +36,7 @@ Settings.embed_model = HuggingFaceEmbedding(
34
 
35
  # Define the directory for persistent storage and data
36
  PERSIST_DIR = "db"
37
- PDF_DIRECTORY = 'data' # Directory containing PDFs
38
 
39
  # Ensure directories exist
40
  os.makedirs(PDF_DIRECTORY, exist_ok=True)
@@ -55,7 +57,7 @@ def handle_query(query):
55
  (
56
  "user",
57
  """
58
- You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. give response within 10-15 words only
59
  {context_str}
60
  Question:
61
  {query_str}
@@ -93,35 +95,43 @@ def handle_query(query):
93
  print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
94
  data_ingestion_from_directory()
95
 
96
- def predict(message, req):
 
 
 
 
97
  logo_html = '''
98
  <div class="circle-logo">
99
  <img src="https://rb.gy/8r06eg" alt="FernAi">
100
  </div>
101
  '''
102
- # Use the gradio_client API to process the chat history and IP address
103
- response = client.predict(
104
- ip_address=req.client.host, # Extract IP address from request
105
- chat_history=message,
106
- api_name="/predict"
107
- )
108
  response_with_logo = f'<div class="response-with-logo">{logo_html}<div class="response-text">{response}</div></div>'
109
  return response_with_logo
 
 
 
110
 
111
  # Define your Gradio chat interface function (replace with your actual logic)
112
- def chat_interface(message, history, request: gr.Request):
113
  try:
114
- # Process the user message and generate a response
115
- response = predict(message, request)
 
 
 
116
 
117
  # Capture the message data
118
  message_data = {
119
- "sender": "user",
120
  "message": message,
121
  "response": response,
122
  "timestamp": datetime.datetime.now().isoformat() # Use a library like datetime
123
  }
124
 
 
 
 
125
  # Return the bot response
126
  return response
127
  except Exception as e:
@@ -163,12 +173,8 @@ label.svelte-1b6s6s {display: none}
163
  div.svelte-rk35yg {display: none;}
164
  div.progress-text.svelte-z7cif2.meta-text {display: none;}
165
  '''
166
-
167
- # Launch the Gradio interface
168
- # Launch the Gradio interface with verbose error reporting
169
  gr.ChatInterface(chat_interface,
170
  css=css,
171
  description="Clara",
172
  clear_btn=None, undo_btn=None, retry_btn=None,
173
- ).launch(show_error=True)
174
-
 
4
  from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
5
  from llama_index.llms.huggingface import HuggingFaceInferenceAPI
6
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
+ from sentence_transformers import SentenceTransformer
8
+ import firebase_admin
9
+ from firebase_admin import db, credentials
10
  import datetime
11
+ import uuid
12
+ import random
 
 
13
 
14
def select_random_name():
    """Pick one of the assistant persona names at random."""
    candidates = ['Clara', 'Lily']
    return random.choice(candidates)
17
 
18
+ # Example usage
19
  # Load environment variables
20
  load_dotenv()
21
+ # authenticate to firebase
22
+ cred = credentials.Certificate("redfernstech-fd8fe-firebase-adminsdk-g9vcn-0537b4efd6.json")
23
+ firebase_admin.initialize_app(cred, {"databaseURL": "https://redfernstech-fd8fe-default-rtdb.firebaseio.com/"})
24
  # Configure the Llama index settings
25
  Settings.llm = HuggingFaceInferenceAPI(
26
  model_name="meta-llama/Meta-Llama-3-8B-Instruct",
 
36
 
37
  # Define the directory for persistent storage and data
38
  PERSIST_DIR = "db"
39
+ PDF_DIRECTORY = 'data' # Changed to the directory containing PDFs
40
 
41
  # Ensure directories exist
42
  os.makedirs(PDF_DIRECTORY, exist_ok=True)
 
57
  (
58
  "user",
59
  """
60
+ You are the clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. give response within 10-15 words only
61
  {context_str}
62
  Question:
63
  {query_str}
 
95
  print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
96
  data_ingestion_from_directory()
97
 
98
# Define the function to handle predictions for the Gradio chat UI.
# (Removed the commented-out duplicate implementation that previously
# shadowed this definition as a dead string literal.)
def predict(message, history):
    """Answer *message* via the RAG pipeline and wrap it in branded HTML.

    Parameters
    ----------
    message : str
        The user's chat message.
    history : list
        Gradio-supplied chat history; unused here, but required by the
        ChatInterface callback signature.

    Returns
    -------
    str
        HTML snippet embedding the logo and the model's response text.
    """
    logo_html = '''
    <div class="circle-logo">
    <img src="https://rb.gy/8r06eg" alt="FernAi">
    </div>
    '''
    # handle_query runs the llama-index RAG query defined earlier in this file.
    response = handle_query(message)
    response_with_logo = f'<div class="response-with-logo">{logo_html}<div class="response-text">{response}</div></div>'
    return response_with_logo
111
def save_chat_message(session_id, message_data):
    """Append one chat record under this session's node in the Firebase RTDB.

    *session_id* keys the conversation; *message_data* is the dict payload
    (sender, message, response, timestamp) pushed as a new child node.
    """
    session_ref = db.reference(f'/chat_history/{session_id}')
    session_ref.push().set(message_data)
114
 
115
  # Define your Gradio chat interface function (replace with your actual logic)
116
+ def chat_interface(message, history,request: gr.Request):
117
  try:
118
+ # Generate a unique session ID for this chat session
119
+ session_id = str(uuid.uuid4())
120
+
121
+ # Process the user message and generate a response (your chatbot logic)
122
+ response = handle_query(message)
123
 
124
  # Capture the message data
125
  message_data = {
126
+ "sender": request.client.host,
127
  "message": message,
128
  "response": response,
129
  "timestamp": datetime.datetime.now().isoformat() # Use a library like datetime
130
  }
131
 
132
+ # Call the save function to store in Firebase with the generated session ID
133
+ save_chat_message(session_id, message_data)
134
+
135
  # Return the bot response
136
  return response
137
  except Exception as e:
 
173
  div.svelte-rk35yg {display: none;}
174
  div.progress-text.svelte-z7cif2.meta-text {display: none;}
175
  '''
 
 
 
# Launch the Gradio chat UI with the custom CSS and the retry/undo/clear
# buttons disabled.
gr.ChatInterface(
    chat_interface,
    css=css,
    description="Clara",
    clear_btn=None,
    undo_btn=None,
    retry_btn=None,
).launch()