Hashim998 commited on
Commit
84a6a0c
·
verified ·
1 Parent(s): 7d3517c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +275 -292
app.py CHANGED
@@ -1,292 +1,275 @@
1
- import os
2
- import uuid
3
- import json
4
- import time
5
- import gradio as gr
6
- import logging
7
- from dotenv import load_dotenv
8
- import google.generativeai as genai
9
-
10
- from langgraph.graph import START, MessagesState, StateGraph
11
- from langgraph.checkpoint.memory import MemorySaver
12
- from langchain_core.messages import HumanMessage, AIMessage
13
- from langchain_core.prompts.chat import (
14
- ChatPromptTemplate,
15
- SystemMessagePromptTemplate,
16
- MessagesPlaceholder,
17
- HumanMessagePromptTemplate,
18
- )
19
- from langchain_google_genai import ChatGoogleGenerativeAI
20
- from langchain_core.messages import BaseMessage
21
-
22
-
23
- # === Logging & .env ===
24
- logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
25
- logger = logging.getLogger(__name__)
26
- load_dotenv()
27
-
28
- GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
29
- if not GEMINI_API_KEY:
30
- raise ValueError("Missing GEMINI_API_KEY")
31
- genai.configure(api_key=GEMINI_API_KEY)
32
-
33
- HISTORY_FILE = "chat_history.json"
34
-
35
- # === Persistent Storage ===
36
- def load_all_sessions():
37
- if os.path.exists(HISTORY_FILE):
38
- with open(HISTORY_FILE, "r", encoding="utf-8") as f:
39
- return json.load(f)
40
- return {}
41
-
42
- def save_all_sessions(sessions):
43
- with open(HISTORY_FILE, "w", encoding="utf-8") as f:
44
- json.dump(sessions, f, indent=2)
45
-
46
- # === Chatbot Class ===
47
- class GeminiChatbot:
48
- def __init__(self):
49
- self.setup_model()
50
-
51
- def setup_model(self):
52
- system_template = """
53
- You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.
54
- Your answers should be informative, engaging, and accurate. If a question doesn't make any sense, or isn't factually coherent, explain why instead of answering something not correct.
55
- If you don't know the answer to a question, please don't share false information.
56
- """
57
-
58
- self.prompt = ChatPromptTemplate.from_messages([
59
- SystemMessagePromptTemplate.from_template(system_template),
60
- MessagesPlaceholder(variable_name="chat_history"),
61
- HumanMessagePromptTemplate.from_template("{input}")
62
- ])
63
-
64
- self.model = ChatGoogleGenerativeAI(
65
- model="gemini-2.0-flash",
66
- temperature=0.7,
67
- top_p=0.95,
68
- google_api_key=GEMINI_API_KEY,
69
- convert_system_message_to_human=True
70
- )
71
-
72
- def call_model(state: MessagesState):
73
- chat_history = state["messages"][:-1]
74
- user_input = state["messages"][-1].content
75
-
76
- formatted_messages = self.prompt.format_messages(
77
- chat_history=chat_history,
78
- input=user_input
79
- )
80
-
81
- response = self.model.invoke(formatted_messages)
82
- return {"messages": response}
83
-
84
- workflow = StateGraph(state_schema=MessagesState)
85
- workflow.add_node("model", call_model)
86
- workflow.add_edge(START, "model")
87
-
88
- self.memory = MemorySaver()
89
- self.app = workflow.compile(checkpointer=self.memory)
90
-
91
- def get_response(self, user_message, history, thread_id):
92
- try:
93
- # Convert string history into LangChain message objects
94
- langchain_history = []
95
- for user, bot in history:
96
- langchain_history.append(HumanMessage(content=user))
97
- langchain_history.append(AIMessage(content=bot))
98
-
99
- # Add the new user message
100
- input_message = HumanMessage(content=user_message)
101
- full_history = langchain_history + [input_message]
102
-
103
- full_response = ""
104
- config = {"configurable": {"thread_id": thread_id}}
105
-
106
- # Invoke the model with full conversation
107
- response = self.app.invoke({"messages": full_history}, config)
108
- complete_response = response["messages"][-1].content
109
-
110
- for char in complete_response:
111
- full_response += char
112
- yield full_response
113
- time.sleep(0.01)
114
-
115
- except Exception as e:
116
- logger.error(f"LangGraph Error: {e}")
117
- yield f"⚠ Error: {type(e).__name__} — {str(e)}"
118
-
119
-
120
- # === Gradio UI ===
121
- chatbot = GeminiChatbot()
122
- sessions = load_all_sessions()
123
-
124
-
125
- def launch_interface():
126
- with gr.Blocks(
127
- theme=gr.themes.Base(),
128
- css="""
129
- body {
130
- background-color: black;
131
- }
132
- .gr-block.gr-textbox textarea {
133
- background-color: #2f2f2f;
134
- color: white;
135
- }
136
- .gr-chatbot {
137
- background-color: #2f2f2f;
138
- color: white;
139
- }
140
- .gr-button, .gr-dropdown {
141
- margin: 5px auto;
142
- display: block;
143
- width: 50%;
144
- }
145
- .gr-markdown h2 {
146
- text-align: center;
147
- color: white;
148
- }
149
- """
150
- ) as demo:
151
- demo.title = "LangChain Powered ChatBot"
152
- gr.Markdown("## LangChain Powered ChatBot")
153
-
154
- current_thread_id = gr.State()
155
- session_names = gr.State()
156
- history = gr.State([])
157
-
158
- # Initialize with first session or create new
159
- if not sessions:
160
- new_id = str(uuid.uuid4())
161
- sessions[new_id] = []
162
- save_all_sessions(sessions)
163
- current_thread_id.value = new_id
164
- session_names.value = [f"NEW: {new_id}"]
165
- else:
166
- current_thread_id.value = next(iter(sessions.keys()))
167
- session_names.value = [f"PREVIOUS: {k}" for k in sessions.keys()]
168
-
169
- def get_dropdown_choices():
170
- """Get current dropdown choices including active sessions and new chat"""
171
- choices = []
172
- for session_id in sessions:
173
- if sessions[session_id]: # Only show sessions with history
174
- choices.append(f"PREVIOUS: {session_id}")
175
- choices.append(f"NEW: {current_thread_id.value}")
176
- return choices
177
-
178
- with gr.Column():
179
- new_chat_btn = gr.Button("New Chat", variant="primary")
180
- session_selector = gr.Dropdown(
181
- label="Chats",
182
- choices=get_dropdown_choices(),
183
- value=f"NEW: {current_thread_id.value}",
184
- interactive=True
185
- )
186
-
187
- chatbot_ui = gr.Chatbot(label="Conversation", height=320)
188
-
189
- with gr.Row():
190
- msg = gr.Textbox(placeholder="Ask a question...", container=False, scale=9)
191
- send = gr.Button("Send", variant="primary", scale=1)
192
-
193
- clear = gr.Button("Clear Current Chat")
194
-
195
- def start_new_chat():
196
- new_id = str(uuid.uuid4())
197
- sessions[new_id] = []
198
- save_all_sessions(sessions)
199
-
200
- # Format for dropdown
201
- display_name = f"NEW: {new_id}"
202
- updated_choices = [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [display_name]
203
-
204
- return (
205
- new_id, # thread ID state
206
- [], # history
207
- gr.update(choices=updated_choices, value=display_name), # update dropdown
208
- display_name # visible value
209
- )
210
-
211
-
212
-
213
- def switch_chat(selected_display_id):
214
- """Switch between different chat sessions"""
215
- if not selected_display_id:
216
- return current_thread_id.value, [], ""
217
-
218
- true_id = selected_display_id.split(": ", 1)[-1]
219
- chat_history = sessions.get(true_id, [])
220
- return true_id, chat_history, selected_display_id
221
-
222
- def respond(message, history, thread_id):
223
- """Generate response and update chat history"""
224
- if not message.strip():
225
- yield history
226
- return
227
-
228
- # Add user message to history
229
- history.append((message, ""))
230
- yield history
231
-
232
- # Stream response
233
- full_response = ""
234
- for chunk in chatbot.get_response(message, history[:-1], thread_id):
235
- full_response = chunk
236
- history[-1] = (message, full_response)
237
- yield history
238
-
239
- # Save updated session
240
- sessions[thread_id] = history
241
- save_all_sessions(sessions)
242
-
243
- def clear_current(thread_id):
244
- """Clear current chat history"""
245
- sessions[thread_id] = []
246
- save_all_sessions(sessions)
247
- return []
248
-
249
- new_chat_btn.click(
250
- start_new_chat,
251
- outputs=[current_thread_id, chatbot_ui, session_selector, session_selector]
252
- )
253
-
254
- session_selector.change(
255
- switch_chat,
256
- inputs=session_selector,
257
- outputs=[current_thread_id, chatbot_ui, session_selector]
258
- )
259
-
260
- send.click(
261
- respond,
262
- inputs=[msg, chatbot_ui, current_thread_id],
263
- outputs=[chatbot_ui]
264
- ).then(
265
- lambda: "", None, msg # Clear input after sending
266
- )
267
-
268
- msg.submit(
269
- respond,
270
- inputs=[msg, chatbot_ui, current_thread_id],
271
- outputs=[chatbot_ui]
272
- ).then(
273
- lambda: "", None, msg # Clear input after sending
274
- )
275
-
276
- clear.click(
277
- clear_current,
278
- inputs=[current_thread_id],
279
- outputs=[chatbot_ui]
280
- )
281
-
282
- return demo
283
-
284
-
285
-
286
- # === Run App ===
287
- if __name__ == "__main__":
288
- try:
289
- demo = launch_interface()
290
- demo.launch(share=True)
291
- except Exception as e:
292
- logger.critical(f"App failed: {e}")
 
1
+ import os
2
+ import uuid
3
+
4
+ import google.generativeai as genai
5
+
6
+ from langgraph.graph import START, MessagesState, StateGraph
7
+ from langgraph.checkpoint.memory import MemorySaver
8
+
9
+ from langchain_core.messages import HumanMessage, AIMessage
10
+ from langchain_core.prompts.chat import (ChatPromptTemplate, SystemMessagePromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate,)
11
+ from langchain_google_genai import ChatGoogleGenerativeAI
12
+
13
+ import gradio as gr
14
+
15
+ import time
16
+ import json
17
+
18
+ import logging
19
+ from dotenv import load_dotenv
20
+
21
+
22
# === Logging & environment setup ===
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
load_dotenv()

# Fail fast at import time if the Gemini API key is not configured.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
if not GEMINI_API_KEY:
    raise ValueError("Missing GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)

# JSON file where all chat sessions are persisted between runs.
HISTORY_FILE = "chat_history.json"
32
+
33
+
34
def load_all_sessions(path=None):
    """Load every saved chat session from disk.

    Args:
        path: Optional override for the history file location; defaults to
            ``HISTORY_FILE`` so existing callers are unaffected.

    Returns:
        dict: Mapping of session/thread id -> list of (user, bot) message
        pairs. Returns ``{}`` when the file is missing, corrupt, or
        unreadable, so a damaged history file cannot crash startup.
    """
    path = HISTORY_FILE if path is None else path
    try:
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        # First run: no history has been saved yet.
        return {}
    except (json.JSONDecodeError, OSError) as e:
        # A corrupt or unreadable history file should degrade gracefully.
        logger.error("Could not load chat history from %s: %s", path, e)
        return {}
39
+
40
def save_all_sessions(sessions, path=None):
    """Persist all chat sessions to disk as pretty-printed JSON.

    Args:
        sessions: Mapping of session/thread id -> list of (user, bot)
            message pairs.
        path: Optional override for the history file location; defaults to
            ``HISTORY_FILE`` so existing callers are unaffected.
    """
    path = HISTORY_FILE if path is None else path
    with open(path, "w", encoding="utf-8") as f:
        json.dump(sessions, f, indent=2)
43
+
44
class GeminiChatbot:
    """Gemini-backed chatbot wrapped in a single-node LangGraph workflow.

    A MemorySaver checkpointer keyed by ``thread_id`` keeps per-session
    graph state across invocations.
    """

    def __init__(self):
        # All heavy setup lives in setup_model so __init__ stays trivial.
        self.setup_model()

    def setup_model(self):
        """Build the prompt template, the Gemini model, and the LangGraph app."""
        # System instructions prepended to every conversation.
        system_template = """
        You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.
        Your answers should be informative, engaging, and accurate. If a question doesn't make any sense, or isn't factually coherent, explain why instead of answering something not correct.
        If you don't know the answer to a question, please don't share false information.
        """

        # Prompt layout: system message, then prior turns, then current input.
        self.prompt = ChatPromptTemplate.from_messages([
            SystemMessagePromptTemplate.from_template(system_template),
            MessagesPlaceholder(variable_name="chat_history"),
            HumanMessagePromptTemplate.from_template("{input}")
        ])

        self.model = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash",
            temperature=0.7,
            top_p=0.95,
            google_api_key=GEMINI_API_KEY,
            # Fold the system message into the first human message, since
            # some Gemini endpoints reject standalone system messages.
            convert_system_message_to_human=True
        )

        def call_model(state: MessagesState):
            # The last message is the new user input; everything before it
            # is the conversation history.
            chat_history = state["messages"][:-1]
            user_input = state["messages"][-1].content

            formatted_messages = self.prompt.format_messages(
                chat_history=chat_history,
                input=user_input
            )

            response = self.model.invoke(formatted_messages)
            return {"messages": response}

        # Single-node graph: START -> "model".
        workflow = StateGraph(state_schema=MessagesState)
        workflow.add_node("model", call_model)
        workflow.add_edge(START, "model")

        # MemorySaver checkpoints conversation state per thread_id.
        self.memory = MemorySaver()
        self.app = workflow.compile(checkpointer=self.memory)

    def get_response(self, user_message, history, thread_id):
        """Yield the assistant's reply incrementally, character by character.

        Args:
            user_message: The new user turn (plain string).
            history: List of (user, bot) string pairs from earlier turns.
            thread_id: Session id used as the LangGraph checkpoint key.

        Yields:
            str: The reply accumulated so far; the final yield is the
            complete reply (or an error message on failure).
        """
        try:
            # Rebuild LangChain message objects from the (user, bot) pairs.
            langchain_history = []
            for user, bot in history:
                langchain_history.append(HumanMessage(content=user))
                langchain_history.append(AIMessage(content=bot))

            input_message = HumanMessage(content=user_message)
            full_history = langchain_history + [input_message]

            full_response = ""
            config = {"configurable": {"thread_id": thread_id}}

            response = self.app.invoke({"messages": full_history}, config)
            complete_response = response["messages"][-1].content

            # Simulated streaming: the model call above is blocking, so the
            # finished text is re-emitted one character at a time.
            for char in complete_response:
                full_response += char
                yield full_response
                time.sleep(0.01)

        except Exception as e:
            # Surface the failure to the UI instead of crashing the app.
            logger.error(f"LangGraph Error: {e}")
            yield f"Error: {type(e).__name__} — {str(e)}"
112
+
113
+
114
# Module-level singletons: one chatbot instance and the persisted sessions.
chatbot = GeminiChatbot()
sessions = load_all_sessions()
116
+
117
+
118
def launch_interface():
    """Build and return the Gradio Blocks UI for the chatbot.

    Wires session management (new / switch / clear) and streamed responses
    to the module-level ``chatbot`` and ``sessions`` objects.
    """
    with gr.Blocks(
        theme=gr.themes.Base(),
        # Dark-theme CSS overrides for the stock Gradio widgets.
        css="""
        body {
            background-color: black;
        }
        .gr-block.gr-textbox textarea {
            background-color: #2f2f2f;
            color: white;
        }
        .gr-chatbot {
            background-color: #2f2f2f;
            color: white;
        }
        .gr-button, .gr-dropdown {
            margin: 5px auto;
            display: block;
            width: 50%;
        }
        .gr-markdown h2 {
            text-align: center;
            color: white;
        }
        """
    ) as demo:
        demo.title = "LangChain Powered ChatBot"
        gr.Markdown("## LangChain Powered ChatBot")

        # Per-browser-session state holders.
        current_thread_id = gr.State()
        session_names = gr.State()  # NOTE(review): only written at init; appears otherwise unused
        history = gr.State([])      # NOTE(review): appears unused; chatbot_ui carries the history

        # Seed state: reuse the first stored session or create a fresh one.
        if not sessions:
            new_id = str(uuid.uuid4())
            sessions[new_id] = []
            save_all_sessions(sessions)
            current_thread_id.value = new_id
            session_names.value = [f"NEW: {new_id}"]
        else:
            current_thread_id.value = next(iter(sessions.keys()))
            session_names.value = [f"PREVIOUS: {k}" for k in sessions.keys()]

        def get_dropdown_choices():
            """Dropdown entries: non-empty stored sessions plus the active one."""
            choices = []
            for session_id in sessions:
                if sessions[session_id]:  # only show sessions with messages
                    choices.append(f"PREVIOUS: {session_id}")
            choices.append(f"NEW: {current_thread_id.value}")
            return choices

        with gr.Column():
            new_chat_btn = gr.Button("New Chat", variant="primary")
            session_selector = gr.Dropdown(
                label="Chats",
                choices=get_dropdown_choices(),
                value=f"NEW: {current_thread_id.value}",
                interactive=True
            )

        chatbot_ui = gr.Chatbot(label="Conversation", height=320)

        with gr.Row():
            msg = gr.Textbox(placeholder="Ask a question...", container=False, scale=9)
            send = gr.Button("Send", variant="primary", scale=1)

        clear = gr.Button("Clear Current Chat")

        def start_new_chat():
            """Create an empty session, persist it, and select it in the UI."""
            new_id = str(uuid.uuid4())
            sessions[new_id] = []
            save_all_sessions(sessions)

            display_name = f"NEW: {new_id}"
            updated_choices = [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [display_name]

            # NOTE(review): session_selector appears twice in this handler's
            # outputs, so both the gr.update and the raw display_name target
            # the same dropdown — confirm the duplicate is intentional.
            return (
                new_id,
                [],
                gr.update(choices=updated_choices, value=display_name),
                display_name
            )

        def switch_chat(selected_display_id):
            """Load the history of the session picked in the dropdown."""
            if not selected_display_id:
                return current_thread_id.value, [], ""

            # Display strings look like "PREVIOUS: <uuid>" / "NEW: <uuid>".
            true_id = selected_display_id.split(": ", 1)[-1]
            chat_history = sessions.get(true_id, [])
            return true_id, chat_history, selected_display_id

        def respond(message, history, thread_id):
            """Stream the bot reply into the chat window, then persist it."""
            if not message.strip():
                yield history
                return

            # Show the user turn immediately with an empty bot slot.
            history.append((message, ""))
            yield history

            # Each chunk is the accumulated reply so far; re-yield history
            # so the Chatbot component updates as the text grows.
            full_response = ""
            for chunk in chatbot.get_response(message, history[:-1], thread_id):
                full_response = chunk
                history[-1] = (message, full_response)
                yield history

            sessions[thread_id] = history
            save_all_sessions(sessions)

        def clear_current(thread_id):
            """Wipe the active session's history on disk and in the UI."""
            sessions[thread_id] = []
            save_all_sessions(sessions)
            return []

        new_chat_btn.click(
            start_new_chat,
            outputs=[current_thread_id, chatbot_ui, session_selector, session_selector]
        )

        session_selector.change(
            switch_chat,
            inputs=session_selector,
            outputs=[current_thread_id, chatbot_ui, session_selector]
        )

        send.click(
            respond,
            inputs=[msg, chatbot_ui, current_thread_id],
            outputs=[chatbot_ui]
        ).then(
            lambda: "", None, msg  # clear the input box after sending
        )

        msg.submit(
            respond,
            inputs=[msg, chatbot_ui, current_thread_id],
            outputs=[chatbot_ui]
        ).then(
            lambda: "", None, msg  # clear the input box after sending
        )

        clear.click(
            clear_current,
            inputs=[current_thread_id],
            outputs=[chatbot_ui]
        )

    return demo
267
+
268
+
269
+
270
# === Run App ===
if __name__ == "__main__":
    try:
        demo = launch_interface()
        # share=True requests a public Gradio link in addition to localhost.
        demo.launch(share=True)
    except Exception as e:
        logger.critical(f"App failed: {e}")