Update app.py
Browse files
app.py
CHANGED
@@ -21,23 +21,38 @@ from langchain_google_genai import ChatGoogleGenerativeAI
|
|
21 |
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
22 |
logger = logging.getLogger(__name__)
|
23 |
|
24 |
-
# ===
|
25 |
-
|
26 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
27 |
|
28 |
# === Chat Storage ===
|
29 |
HISTORY_FILE = "chat_history.json"
|
30 |
|
31 |
def load_all_sessions():
|
32 |
-
|
33 |
-
|
34 |
-
|
|
|
|
|
|
|
35 |
return {}
|
36 |
|
37 |
def save_all_sessions(sessions):
|
38 |
-
|
39 |
-
|
|
|
|
|
|
|
40 |
|
|
|
41 |
sessions = load_all_sessions()
|
42 |
|
43 |
# === Gemini Chatbot ===
|
@@ -58,20 +73,31 @@ class GeminiChatbot:
|
|
58 |
HumanMessagePromptTemplate.from_template("{input}")
|
59 |
])
|
60 |
|
61 |
-
|
62 |
-
model=
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
|
|
|
|
|
|
|
|
68 |
|
69 |
def call_model(state: MessagesState):
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
75 |
|
76 |
workflow = StateGraph(state_schema=MessagesState)
|
77 |
workflow.add_node("model", call_model)
|
@@ -94,6 +120,7 @@ class GeminiChatbot:
|
|
94 |
response = self.app.invoke({"messages": full_history}, config)
|
95 |
full_text = response["messages"][-1].content
|
96 |
|
|
|
97 |
full_response = ""
|
98 |
for char in full_text:
|
99 |
full_response += char
|
@@ -102,106 +129,151 @@ class GeminiChatbot:
|
|
102 |
|
103 |
except Exception as e:
|
104 |
logger.error(f"Response error: {e}")
|
105 |
-
yield f"⚠
|
106 |
|
107 |
-
|
|
|
|
|
|
|
|
|
|
|
108 |
|
109 |
# === Gradio UI ===
|
110 |
def launch_interface():
|
111 |
with gr.Blocks(
|
112 |
theme=gr.themes.Base(),
|
113 |
css="""
|
114 |
-
body { background-color:
|
115 |
-
.gr-
|
116 |
-
.gr-
|
117 |
-
.gr-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
}
|
122 |
-
.gr-markdown h2 { text-align: center;
|
123 |
"""
|
124 |
) as demo:
|
125 |
demo.title = "LangChain Powered ChatBot"
|
126 |
gr.Markdown("## LangChain Powered ChatBot")
|
127 |
|
|
|
128 |
current_thread_id = gr.State()
|
129 |
-
session_names = gr.State()
|
130 |
history = gr.State([])
|
131 |
-
|
|
|
132 |
if not sessions:
|
133 |
new_id = str(uuid.uuid4())
|
134 |
sessions[new_id] = []
|
135 |
save_all_sessions(sessions)
|
136 |
current_thread_id.value = new_id
|
137 |
-
session_names.value = [f"NEW: {new_id}"]
|
138 |
else:
|
139 |
current_thread_id.value = next(iter(sessions))
|
140 |
-
session_names.value = [f"PREVIOUS: {k}" for k in sessions if sessions[k]]
|
141 |
|
142 |
def get_dropdown_choices():
|
143 |
-
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
choices
|
149 |
-
|
150 |
-
|
151 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
152 |
|
153 |
-
chatbot_ui = gr.Chatbot(label="Conversation", height=
|
154 |
|
155 |
with gr.Row():
|
156 |
-
msg = gr.Textbox(placeholder="
|
157 |
-
|
158 |
|
159 |
-
|
160 |
|
161 |
def start_new_chat():
|
162 |
new_id = str(uuid.uuid4())
|
163 |
sessions[new_id] = []
|
164 |
save_all_sessions(sessions)
|
165 |
-
|
166 |
-
|
167 |
-
return new_id, [], gr.update(choices=updated, value=display), display
|
168 |
|
169 |
-
def switch_chat(
|
170 |
-
|
171 |
-
|
|
|
|
|
172 |
|
173 |
-
def respond(message,
|
174 |
if not message.strip():
|
175 |
-
yield
|
176 |
return
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
|
184 |
-
|
|
|
|
|
|
|
185 |
save_all_sessions(sessions)
|
|
|
|
|
|
|
|
|
|
|
|
|
186 |
|
187 |
def clear_chat(thread_id):
|
188 |
sessions[thread_id] = []
|
189 |
save_all_sessions(sessions)
|
190 |
return []
|
191 |
|
192 |
-
#
|
193 |
-
new_chat_btn.click(
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
198 |
|
199 |
return demo
|
200 |
|
201 |
-
#
|
202 |
if __name__ == "__main__":
|
203 |
try:
|
204 |
demo = launch_interface()
|
205 |
-
demo.launch()
|
206 |
except Exception as e:
|
207 |
-
logger.critical(f"
|
|
|
21 |
# Root logging config: timestamped INFO-level messages for the whole app.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
|
23 |
|
24 |
+
# === API Key Configuration ===
# Get API key from Hugging Face secrets (environment variable).
# Fail fast at import time: the app cannot serve anything without a key.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
if not GEMINI_API_KEY:
    raise ValueError("GEMINI_API_KEY not found in environment variables. Please set it in Hugging Face secrets.")

try:
    # Configure the SDK globally with the key (presumably google.generativeai
    # imported as `genai` above — import block not visible here, TODO confirm).
    genai.configure(api_key=GEMINI_API_KEY)
except Exception as e:
    logger.error(f"Failed to configure Gemini API: {e}")
    raise  # configuration failure is fatal — re-raise after logging
35 |
|
36 |
# === Chat Storage ===
# All chat sessions are persisted to a single JSON file in the working directory.
HISTORY_FILE = "chat_history.json"
|
38 |
|
39 |
def load_all_sessions():
    """Load every stored chat session from HISTORY_FILE.

    Returns:
        dict: mapping of thread-id -> message history. A missing file or
        any read/parse failure yields an empty dict (best-effort load:
        the error is logged, never raised).
    """
    try:
        if os.path.exists(HISTORY_FILE):
            with open(HISTORY_FILE, "r", encoding="utf-8") as handle:
                raw = handle.read()
            return json.loads(raw)
    except Exception as exc:
        logger.error(f"Error loading sessions: {exc}")
    return {}
|
47 |
|
48 |
def save_all_sessions(sessions):
    """Persist the full session mapping to HISTORY_FILE as indented JSON.

    Args:
        sessions: dict of thread-id -> message history (JSON-serializable).

    Failures are logged and swallowed (best-effort persistence).

    FIX: serialize *before* opening the file for writing. The previous
    version opened with "w" (truncating the file) and then serialized, so
    a non-serializable value destroyed the existing history on disk.
    """
    try:
        payload = json.dumps(sessions, indent=2)
        with open(HISTORY_FILE, "w", encoding="utf-8") as handle:
            handle.write(payload)
    except Exception as e:
        logger.error(f"Error saving sessions: {e}")
|
54 |
|
55 |
+
# Initialize sessions
# Module-level session store: thread-id -> message history (per `respond`
# below, each history entry is a (user_message, assistant_reply) pair).
# NOTE(review): loaded once at import time; concurrent writers are not
# synchronized — acceptable for a single-process Gradio app, confirm if scaled.
sessions = load_all_sessions()
|
57 |
|
58 |
# === Gemini Chatbot ===
|
|
|
73 |
HumanMessagePromptTemplate.from_template("{input}")
|
74 |
])
|
75 |
|
76 |
+
try:
|
77 |
+
self.model = ChatGoogleGenerativeAI(
|
78 |
+
model="gemini-1.5-flash", # Updated to more stable model
|
79 |
+
temperature=0.7,
|
80 |
+
top_p=0.95,
|
81 |
+
google_api_key=GEMINI_API_KEY,
|
82 |
+
convert_system_message_to_human=True
|
83 |
+
)
|
84 |
+
except Exception as e:
|
85 |
+
logger.error(f"Failed to initialize Gemini model: {e}")
|
86 |
+
raise
|
87 |
|
88 |
def call_model(state: MessagesState):
    """LangGraph node: run one model turn over the accumulated messages.

    `state["messages"]` holds the whole conversation; the final entry is
    the new user turn and everything before it is prior history.
    """
    try:
        # Split accumulated messages into prior history + the new user input.
        chat_history = state["messages"][:-1]
        user_input = state["messages"][-1].content
        # Render the ChatPromptTemplate (system + history placeholder + human).
        formatted_messages = self.prompt.format_messages(
            chat_history=chat_history,
            input=user_input
        )
        response = self.model.invoke(formatted_messages)
        # NOTE(review): returns a single message rather than a list;
        # MessagesState's add_messages reducer accepts either — confirm
        # against the installed langgraph version.
        return {"messages": response}
    except Exception as e:
        logger.error(f"Model invocation error: {e}")
        raise  # propagate so the calling method can surface the failure
|
101 |
|
102 |
workflow = StateGraph(state_schema=MessagesState)
|
103 |
workflow.add_node("model", call_model)
|
|
|
120 |
response = self.app.invoke({"messages": full_history}, config)
|
121 |
full_text = response["messages"][-1].content
|
122 |
|
123 |
+
# Stream response character by character
|
124 |
full_response = ""
|
125 |
for char in full_text:
|
126 |
full_response += char
|
|
|
129 |
|
130 |
except Exception as e:
|
131 |
logger.error(f"Response error: {e}")
|
132 |
+
yield f"⚠ Error: {type(e).__name__} — {str(e)}"
|
133 |
|
134 |
+
# Initialize chatbot
# One module-level chatbot instance shared by all Gradio requests.
try:
    chatbot = GeminiChatbot()
except Exception as e:
    logger.critical(f"Failed to initialize chatbot: {e}")
    raise  # without a working model the app cannot serve any request
|
140 |
|
141 |
# === Gradio UI ===
def launch_interface():
    """Build and return the Gradio Blocks app (does not launch it).

    Uses the module-level `sessions` store, `save_all_sessions`, and the
    shared `chatbot` instance. All event wiring is internal.

    Fixes over the previous revision:
    - `gr.Dropdown.update(...)` was removed in Gradio 4; use `gr.update(...)`.
    - `respond` sometimes yielded a single value against two declared
      outputs; every yield is now a (chat_history, dropdown_update) pair.
    - Handlers mutated `gr.State.value` at runtime, which has no effect
      after launch; the active thread id is now returned through outputs.
    """
    with gr.Blocks(
        theme=gr.themes.Base(),
        css="""
        body { background-color: #f0f2f6; }
        .gr-block { background-color: white; }
        .gr-textbox textarea { background-color: white; }
        .gr-chatbot { background-color: white; border-radius: 10px; }
        .gr-button {
            margin: 5px;
            border-radius: 5px;
        }
        .gr-markdown h2 { text-align: center; }
        """
    ) as demo:
        demo.title = "LangChain Powered ChatBot"
        gr.Markdown("## LangChain Powered ChatBot")

        # Ensure at least one session exists and pick the active one BEFORE
        # building components, so their initial values are consistent.
        if not sessions:
            initial_thread_id = str(uuid.uuid4())
            sessions[initial_thread_id] = []
            save_all_sessions(sessions)
        else:
            initial_thread_id = next(iter(sessions))

        # Per-browser-session state. Handlers must RETURN a new id to change
        # it — assigning `.value` after launch only alters the default.
        current_thread_id = gr.State(initial_thread_id)
        history = gr.State([])  # currently unused; kept for compatibility

        def get_dropdown_choices(active_id):
            # (label, session_id) pairs for every non-empty session, plus an
            # entry representing the currently active (possibly empty) chat.
            choices = []
            for session_id in sessions:
                if sessions[session_id]:  # only show sessions with messages
                    first_msg = sessions[session_id][0][0][:20]  # first user msg snippet
                    choices.append((f"Chat: {first_msg}...", session_id))
            choices.append(("+ New Chat", active_id))
            return choices

        # UI Components
        with gr.Row():
            new_chat_btn = gr.Button("+ New Chat", variant="primary")
            session_selector = gr.Dropdown(
                label="Your Chats",
                choices=get_dropdown_choices(initial_thread_id),
                value=initial_thread_id,
                interactive=True
            )

        chatbot_ui = gr.Chatbot(label="Conversation", height=400)

        with gr.Row():
            msg = gr.Textbox(placeholder="Type your message...", container=False, scale=9)
            send_btn = gr.Button("Send", variant="primary", scale=1)

        clear_btn = gr.Button("Clear Current Chat")

        def start_new_chat():
            """Create an empty session and make it active.

            Returns (new_thread_id, cleared_chat, dropdown_update) for
            outputs [current_thread_id, chatbot_ui, session_selector].
            """
            new_id = str(uuid.uuid4())
            sessions[new_id] = []
            save_all_sessions(sessions)
            return new_id, [], gr.update(choices=get_dropdown_choices(new_id), value=new_id)

        def switch_chat(session_id):
            """Activate the dropdown-selected session and load its history."""
            return session_id, sessions.get(session_id, [])

        def respond(message, chat_history, thread_id):
            """Stream the assistant's reply into the chat window.

            Every yield is (chat_history, dropdown_update) to match the
            declared outputs [chatbot_ui, session_selector].
            """
            if not message.strip():
                yield chat_history, gr.update()
                return

            # Show the user's message immediately with an empty reply slot.
            chat_history.append((message, ""))
            yield chat_history, gr.update()

            full_response = ""
            for chunk in chatbot.get_response(message, chat_history[:-1], thread_id):
                full_response = chunk  # chunks are cumulative, not deltas
                chat_history[-1] = (message, full_response)
                yield chat_history, gr.update()

            sessions[thread_id] = chat_history
            save_all_sessions(sessions)

            # First message in this session: it now qualifies for the dropdown.
            if len(chat_history) == 1:
                yield chat_history, gr.update(choices=get_dropdown_choices(thread_id), value=thread_id)
            else:
                yield chat_history, gr.update()

        def clear_chat(thread_id):
            """Wipe the active session's history on disk and in the UI."""
            sessions[thread_id] = []
            save_all_sessions(sessions)
            return []

        # Event handlers
        new_chat_btn.click(
            start_new_chat,
            outputs=[current_thread_id, chatbot_ui, session_selector]
        )

        session_selector.change(
            switch_chat,
            inputs=session_selector,
            outputs=[current_thread_id, chatbot_ui]
        )

        send_btn.click(
            respond,
            inputs=[msg, chatbot_ui, current_thread_id],
            outputs=[chatbot_ui, session_selector]
        ).then(lambda: "", None, msg)

        msg.submit(
            respond,
            inputs=[msg, chatbot_ui, current_thread_id],
            outputs=[chatbot_ui, session_selector]
        ).then(lambda: "", None, msg)

        clear_btn.click(
            clear_chat,
            inputs=[current_thread_id],
            outputs=[chatbot_ui]
        )

    return demo
|
272 |
|
273 |
+
# Run the app
if __name__ == "__main__":
    try:
        demo = launch_interface()
        # 0.0.0.0:7860 is the binding Hugging Face Spaces containers expect.
        demo.launch(server_name="0.0.0.0", server_port=7860)
    except Exception as e:
        logger.critical(f"Application failed to start: {e}")
        # FIX: re-raise so the process exits non-zero on startup failure
        # instead of silently terminating with success status.
        raise
|