Srinivasulu kethanaboina committed on
Commit
5cff97b
·
verified ·
1 Parent(s): d4aca7a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +151 -47
app.py CHANGED
@@ -1,55 +1,159 @@
 
 
1
  import gradio as gr
 
2
  import firebase_admin
3
  from firebase_admin import db, credentials
4
- from gradio_client import Client
 
 
 
 
 
 
 
5
 
6
- # Initialize Firebase
7
- cred = credentials.Certificate("redfernstech-fd8fe-firebase-adminsdk-g9vcn-0537b4efd6.json")
 
 
 
8
  firebase_admin.initialize_app(cred, {"databaseURL": "https://redfernstech-fd8fe-default-rtdb.firebaseio.com/"})
9
 
10
- # Initialize Gradio Client
11
- client = Client("vilarin/Llama-3.1-8B-Instruct")
12
-
13
- def process_inputs(ip_address, chat_history):
14
- # Process chat history with the model
15
- result = client.predict(
16
- message=chat_history,
17
- system_prompt="summarize the text and give me the client interest within 30-40 words only in bullet points",
18
- temperature=0.8,
19
- max_new_tokens=1024,
20
- top_p=1,
21
- top_k=20,
22
- penalty=1.2,
23
- api_name="/chat"
24
- )
25
-
26
- # Print result for debugging
27
- print(result)
28
-
29
- # Check Firebase for existing IP address
30
- ref = db.reference('ip_addresses')
31
- ip_data = ref.get()
32
-
33
- if ip_address in ip_data:
34
- # Update existing record
35
- ref.child(ip_address).update({"chat_history": chat_history, "result": result})
36
- else:
37
- # Create new record
38
- ref.child(ip_address).set({"chat_history": chat_history, "result": result})
39
-
40
- return result
41
-
42
- # Define the Gradio interface
43
- interface = gr.Interface(
44
- fn=process_inputs,
45
- inputs=[
46
- gr.Textbox(label="IP Address"),
47
- gr.Textbox(label="Chat History")
48
- ],
49
- outputs="text",
50
- title="Chat History Processor",
51
- description="Enter an IP address and chat history to process and save to Firebase."
52
  )
53
 
54
- # Launch the Gradio interface
55
- interface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from dotenv import load_dotenv
from fastapi import FastAPI, Request
import gradio as gr
import os
import firebase_admin
from firebase_admin import db, credentials
import datetime
import uuid
import random
from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
import threading
import uvicorn

# Load environment variables (HF_TOKEN, optional FIREBASE_CRED_PATH) from .env
load_dotenv()

# Authenticate to Firebase. The service-account path can be overridden with
# FIREBASE_CRED_PATH; the default keeps the original hard-coded Colab path
# for backward compatibility.
cred = credentials.Certificate(
    os.getenv(
        "FIREBASE_CRED_PATH",
        "/content/redfernstech-fd8fe-firebase-adminsdk-g9vcn-0537b4efd6.json",
    )
)
# initialize_app() raises ValueError if the default app already exists
# (e.g. module re-import under an auto-reloading server), so guard it.
if not firebase_admin._apps:
    firebase_admin.initialize_app(cred, {"databaseURL": "https://redfernstech-fd8fe-default-rtdb.firebaseio.com/"})

app = FastAPI()

# Configure the Llama index settings
Settings.llm = HuggingFaceInferenceAPI(
    model_name="meta-llama/Meta-Llama-3-8B-Instruct",
    tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
    context_window=3000,
    token=os.getenv("HF_TOKEN"),
    max_new_tokens=512,
    generate_kwargs={"temperature": 0.1},
)
Settings.embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5"
)

# Define directories
PERSIST_DIR = "db"       # where the vector index is persisted
PDF_DIRECTORY = 'data'   # source documents for ingestion
os.makedirs(PDF_DIRECTORY, exist_ok=True)
os.makedirs(PERSIST_DIR, exist_ok=True)

# Variable to store chat history (module-level; grows for the process lifetime)
current_chat_history = []
47
def data_ingestion_from_directory():
    """Build a vector index from all documents in PDF_DIRECTORY and persist it to PERSIST_DIR."""
    documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
    # The original created an unused default StorageContext here; removed —
    # from_documents() builds its own storage context, which is what gets persisted.
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)
52
+
53
def handle_query(query):
    """Answer *query* against the persisted vector index, feeding prior turns into the prompt.

    Side effect: appends ``(query, response)`` to the module-level
    ``current_chat_history`` list.
    """
    # Prompt template; {context_str} and {query_str} are filled in by the
    # llama-index query engine.
    qa_messages = [
        (
            "user",
            """
            You are Clara, the Redfernstech chatbot. Provide accurate, professional, and concise answers based on the data. Respond within 10-15 words only.
            {context_str}
            Question:
            {query_str}
            """
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(qa_messages)

    # Reload the persisted index from disk on every call.
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

    # Fold earlier turns (newest first) into a single context string,
    # skipping turns whose query was blank.
    history_fragments = [
        f"User asked: '{past_query}'\nBot answered: '{past_response}'\n"
        for past_query, past_response in reversed(current_chat_history)
        if past_query.strip()
    ]
    context_str = "".join(history_fragments)

    query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
    answer = query_engine.query(query)

    # Fall back to a canned apology when the engine result has no .response.
    if hasattr(answer, 'response'):
        response = answer.response
    else:
        response = "Sorry, I couldn't find an answer."

    current_chat_history.append((query, response))
    return response
83
+
84
def save_chat_message(username, email, session_id, message_data):
    """Append *message_data* under /chat_history/<username>/<email>/<session_id> in Firebase RTDB."""
    session_path = f'/chat_history/{username}/{email}/{session_id}'
    session_ref = db.reference(session_path)
    # push() mints a unique child key; set() writes the payload under it.
    session_ref.push().set(message_data)
87
+
88
def chat_interface(message, history, request: gr.Request):
    """Gradio chat callback: answer *message* and record the exchange in Firebase.

    username/email are read from the Gradio request's query string. A fresh
    session_id (uuid4) is generated per call, so every message lands under its
    own session node — presumably intentional, TODO confirm with the author.
    Returns the model response, or the error text if answering failed.
    """
    try:
        username = request.query_params.get('username')
        email = request.query_params.get('email')
        session_id = str(uuid.uuid4())
        response = handle_query(message)
    except Exception as e:
        # UI boundary: surface the error text in the chat instead of crashing.
        return str(e)

    # Persist best-effort: a Firebase failure must not discard the answer we
    # already computed for the user (the original returned the error string
    # here, losing the response).
    try:
        message_data = {
            "sender": request.client.host,
            "message": message,
            "response": response,
            "timestamp": datetime.datetime.now().isoformat()
        }
        save_chat_message(username, email, session_id, message_data)
    except Exception as e:
        print(f"Failed to save chat message: {e}")

    return response
107
+
108
# Inline CSS injected into the Gradio UI:
#  - .circle-logo / .response-with-logo: 40px circular avatar beside responses
#  - footer and the .svelte-* selectors: hide Gradio's footer, labels, and
#    progress text (selectors are version-specific Gradio internals — fragile)
#  - .svelte-1ed2p3z p: enlarges/bolds the title text
css = '''
.circle-logo {
    display: inline-block;
    width: 40px;
    height: 40px;
    border-radius: 50%;
    overflow: hidden;
    margin-right: 10px;
    vertical-align: middle;
}
.circle-logo img {
    width: 100%;
    height: 100%;
    object-fit: cover;
}
.response-with-logo {
    display: flex;
    align-items: center;
    margin-bottom: 10px;
}
footer {
    display: none !important;
    background-color: #F8D7DA;
}
.svelte-1ed2p3z p {
    font-size: 24px;
    font-weight: bold;
    line-height: 1.2;
    color: #111;
    margin: 20px 0;
}
label.svelte-1b6s6s {display: none}
div.svelte-rk35yg {display: none;}
div.progress-text.svelte-z7cif2.meta-text {display: none;}
'''
143
+
144
@app.get("/chat")
async def chat_ui(username: str, email: str):
    # NOTE(review): calling .launch() inside a request handler starts a *new*
    # Gradio server on every GET /chat and blocks this handler until it exits;
    # the conventional pattern is gr.mount_gradio_app(app, demo, path="/chat")
    # at startup — confirm intended behavior before relying on this endpoint.
    # NOTE(review): username/email are accepted here but never used in this
    # body; chat_interface reads them from the Gradio request's query params.
    with gr.Blocks(css=css) as demo:
        gr.ChatInterface(
            fn=chat_interface,
            css=css,
            description="Clara",
            clear_btn=None,
            undo_btn=None,
            retry_btn=None
        ).launch()
    return {"message": "Chat interface launched."}
156
+
157
if __name__ == "__main__":
    # Build (or rebuild) the vector index before serving.
    data_ingestion_from_directory()
    # Run uvicorn in the main thread. The original launched it in a daemon
    # thread and then fell off the end of the script, so the interpreter
    # exited immediately and the daemon server thread was killed before it
    # could serve a single request.
    uvicorn.run(app, host="0.0.0.0", port=8000)