Srinivasulu kethanaboina committed on
Commit
a001ee9
·
verified ·
1 Parent(s): cd7fdea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -180
app.py CHANGED
@@ -1,183 +1,8 @@
1
- from dotenv import load_dotenv
2
  import gradio as gr
3
- import os
4
- import uvicorn
5
- from fastapi import FastAPI, Request
6
- from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
7
- from llama_index.llms.huggingface import HuggingFaceInferenceAPI
8
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
9
- import firebase_admin
10
- from firebase_admin import db, credentials
11
- import datetime
12
- import uuid
13
- import threading
14
- import random
15
-
16
- # Function to select a random name
17
- def select_random_name():
18
- names = ['Clara', 'Lily']
19
- return random.choice(names)
20
-
21
- # Load environment variables
22
- load_dotenv()
23
-
24
- # Authenticate to Firebase
25
- cred = credentials.Certificate("redfernstech-fd8fe-firebase-adminsdk-g9vcn-0537b4efd6.json")
26
- firebase_admin.initialize_app(cred, {"databaseURL": "https://redfernstech-fd8fe-default-rtdb.firebaseio.com/"})
27
-
28
- # Configure Llama index settings
29
- Settings.llm = HuggingFaceInferenceAPI(
30
- model_name="meta-llama/Meta-Llama-3-8B-Instruct",
31
- tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
32
- context_window=3000,
33
- token=os.getenv("HF_TOKEN"),
34
- max_new_tokens=512,
35
- generate_kwargs={"temperature": 0.1},
36
- )
37
-
38
- Settings.embed_model = HuggingFaceEmbedding(
39
- model_name="BAAI/bge-small-en-v1.5"
40
- )
41
-
42
- # Define the directory for persistent storage and data
43
- PERSIST_DIR = "db"
44
- PDF_DIRECTORY = 'data'
45
-
46
- # Ensure directories exist
47
- os.makedirs(PDF_DIRECTORY, exist_ok=True)
48
- os.makedirs(PERSIST_DIR, exist_ok=True)
49
-
50
- # Variable to store current chat conversation
51
- current_chat_history = []
52
-
53
- def data_ingestion_from_directory():
54
- # Use SimpleDirectoryReader on the directory containing the PDF files
55
- documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
56
- storage_context = StorageContext.from_defaults()
57
- index = VectorStoreIndex.from_documents(documents)
58
- index.storage_context.persist(persist_dir=PERSIST_DIR)
59
-
60
- def handle_query(query):
61
- chat_text_qa_msgs = [
62
- (
63
- "user",
64
- """
65
- You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give responses within 10-15 words only.
66
- {context_str}
67
- Question:
68
- {query_str}
69
- """
70
- )
71
- ]
72
- text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
73
-
74
- # Load index from storage
75
- storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
76
- index = load_index_from_storage(storage_context)
77
-
78
- # Use chat history to enhance response
79
- context_str = ""
80
- for past_query, response in reversed(current_chat_history):
81
- if past_query.strip():
82
- context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
83
-
84
- query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
85
- answer = query_engine.query(query)
86
-
87
- if hasattr(answer, 'response'):
88
- response = answer.response
89
- elif isinstance(answer, dict) and 'response' in answer:
90
- response = answer['response']
91
- else:
92
- response = "Sorry, I couldn't find an answer."
93
-
94
- # Update current chat history
95
- current_chat_history.append((query, response))
96
-
97
- return response
98
-
99
- def save_chat_message(session_id, message_data):
100
- ref = db.reference(f'/chat_history/{session_id}') # Use the session ID to save chat data
101
- ref.push().set(message_data)
102
-
103
- def chat_interface(message, history):
104
- try:
105
- # Generate a unique session ID for this chat session
106
- session_id = str(uuid.uuid4())
107
-
108
- # Process the user message and generate a response (your chatbot logic)
109
- response = handle_query(message)
110
-
111
- # Capture the message data
112
- message_data = {
113
- "sender": "user",
114
- "message": message,
115
- "response": response,
116
- "timestamp": datetime.datetime.now().isoformat() # Use a library like datetime
117
- }
118
-
119
- # Call the save function to store in Firebase with the generated session ID
120
- save_chat_message(session_id, message_data)
121
-
122
- # Return the bot response
123
- return response
124
- except Exception as e:
125
- return str(e)
126
-
127
- # Custom CSS for styling
128
- css = '''
129
- .circle-logo {
130
- display: inline-block;
131
- width: 40px;
132
- height: 40px;
133
- border-radius: 50%;
134
- overflow: hidden;
135
- margin-right: 10px;
136
- vertical-align: middle;
137
- }
138
- .circle-logo img {
139
- width: 100%;
140
- height: 100%;
141
- object-fit: cover;
142
- }
143
- .response-with-logo {
144
- display: flex;
145
- align-items: center;
146
- margin-bottom: 10px;
147
- }
148
- footer {
149
- display: none !important;
150
- background-color: #F8D7DA;
151
- }
152
- .svelte-1ed2p3z p {
153
- font-size: 24px;
154
- font-weight: bold;
155
- line-height: 1.2;
156
- color: #111;
157
- margin: 20px 0;
158
- }
159
- label.svelte-1b6s6s {display: none}
160
- div.svelte-rk35yg {display: none;}
161
- div.progress-text.svelte-z7cif2.meta-text {display: none;}
162
- '''
163
-
164
  app = FastAPI()
165
-
166
  @app.get("/")
167
- async def root():
168
- return {"message": "Hello"}
169
-
170
- @app.get("/chat")
171
- async def chat_ui(username: str, email: str):
172
- gr.ChatInterface(
173
- fn=chat_interface,
174
- css=css,
175
- description="Clara",
176
- clear_btn=None,
177
- undo_btn=None,
178
- retry_btn=None
179
- ).launch()
180
- return {"message": "Chat interface launched."}
181
-
182
- if __name__ == "__main__":
183
- threading.Thread(target=lambda: uvicorn.run(app, host="0.0.0.0", port=8000), daemon=True).start()
 
1
from fastapi import FastAPI
import gradio as gr


# Host application: FastAPI serves the root endpoint, and the Gradio
# chat UI is mounted as a sub-application under /gradio.
app = FastAPI()


@app.get("/")
def read_main():
    """Root endpoint of the FastAPI host app.

    Returns:
        dict: a simple JSON status message.
    """
    return {"message": "This is your main app"}


def respond_to_chat(message, history):
    """Chat handler used by the Gradio ChatInterface.

    BUG FIX: the original commit referenced ``respond_to_chat`` without
    defining it anywhere, so importing this module raised ``NameError``
    before the server could start. This minimal echo implementation makes
    the module importable; replace its body with real chatbot logic.

    Args:
        message (str): the latest user message.
        history: prior (user, bot) turns supplied by gr.ChatInterface;
            unused here.

    Returns:
        str: the bot reply shown in the chat UI.
    """
    return f"You said: {message}"


# Build the Gradio chat UI and mount it on the FastAPI app at /gradio.
io = gr.ChatInterface(respond_to_chat)
app = gr.mount_gradio_app(app, io, path="/gradio")