Update app.py
Browse files
app.py
CHANGED
@@ -4,9 +4,6 @@ import json
|
|
4 |
import time
|
5 |
import gradio as gr
|
6 |
import logging
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
import google.generativeai as genai
|
11 |
|
12 |
from langgraph.graph import START, MessagesState, StateGraph
|
@@ -24,19 +21,11 @@ from langchain_google_genai import ChatGoogleGenerativeAI
|
|
24 |
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
25 |
logger = logging.getLogger(__name__)
|
26 |
|
27 |
-
#
|
28 |
-
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
|
29 |
-
|
30 |
-
if not GEMINI_API_KEY:
|
31 |
-
raise RuntimeError("GEMINI_API_KEY not found in Hugging Face Secrets")
|
32 |
-
|
33 |
-
# Ensure it's accessible globally
|
34 |
os.environ["GOOGLE_API_KEY"] = GEMINI_API_KEY
|
35 |
-
|
36 |
-
# Configure Google GenAI
|
37 |
genai.configure(api_key=GEMINI_API_KEY)
|
38 |
|
39 |
-
|
40 |
# === Chat Storage ===
|
41 |
HISTORY_FILE = "chat_history.json"
|
42 |
|
@@ -52,7 +41,7 @@ def save_all_sessions(sessions):
|
|
52 |
|
53 |
sessions = load_all_sessions()
|
54 |
|
55 |
-
# === Gemini
|
56 |
class GeminiChatbot:
|
57 |
def __init__(self):
|
58 |
self.setup_model()
|
@@ -93,10 +82,7 @@ class GeminiChatbot:
|
|
93 |
self.app = workflow.compile(checkpointer=self.memory)
|
94 |
|
95 |
def get_response(self, user_message, history, thread_id):
|
96 |
-
from langchain_core.messages import HumanMessage, AIMessage
|
97 |
-
|
98 |
try:
|
99 |
-
# Format chat history for LangChain
|
100 |
langchain_history = []
|
101 |
for user, bot in history:
|
102 |
langchain_history.append(HumanMessage(content=user))
|
@@ -106,7 +92,6 @@ class GeminiChatbot:
|
|
106 |
full_history = langchain_history + [input_msg]
|
107 |
config = {"configurable": {"thread_id": thread_id}}
|
108 |
|
109 |
-
# Get final response
|
110 |
response = self.app.invoke({"messages": full_history}, config)
|
111 |
full_text = response["messages"][-1].content
|
112 |
|
@@ -120,7 +105,6 @@ class GeminiChatbot:
|
|
120 |
logger.error(f"Response error: {e}")
|
121 |
yield f"⚠ Error: {type(e).__name__} — {str(e)}"
|
122 |
|
123 |
-
|
124 |
chatbot = GeminiChatbot()
|
125 |
|
126 |
# === Gradio UI ===
|
@@ -159,7 +143,6 @@ def launch_interface():
|
|
159 |
def get_dropdown_choices():
|
160 |
return [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [f"NEW: {current_thread_id.value}"]
|
161 |
|
162 |
-
# UI
|
163 |
new_chat_btn = gr.Button("New Chat", variant="primary")
|
164 |
session_selector = gr.Dropdown(
|
165 |
label="Chats",
|
@@ -176,7 +159,6 @@ def launch_interface():
|
|
176 |
|
177 |
clear = gr.Button("Clear Current Chat")
|
178 |
|
179 |
-
# === Event Functions ===
|
180 |
def start_new_chat():
|
181 |
new_id = str(uuid.uuid4())
|
182 |
sessions[new_id] = []
|
@@ -208,7 +190,7 @@ def launch_interface():
|
|
208 |
save_all_sessions(sessions)
|
209 |
return []
|
210 |
|
211 |
-
#
|
212 |
new_chat_btn.click(start_new_chat, outputs=[current_thread_id, chatbot_ui, session_selector, session_selector])
|
213 |
session_selector.change(switch_chat, inputs=session_selector, outputs=[current_thread_id, chatbot_ui, session_selector])
|
214 |
send.click(respond, [msg, chatbot_ui, current_thread_id], [chatbot_ui]).then(lambda: "", None, msg)
|
|
|
4 |
import time
|
5 |
import gradio as gr
|
6 |
import logging
|
|
|
|
|
|
|
7 |
import google.generativeai as genai
|
8 |
|
9 |
from langgraph.graph import START, MessagesState, StateGraph
|
|
|
21 |
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
22 |
logger = logging.getLogger(__name__)
|
23 |
|
24 |
+
# === API key (loaded from environment / HF Secrets — never commit secrets to source) ===
|
25 |
+
GEMINI_API_KEY = os.environ["GEMINI_API_KEY"]  # fail fast if the secret is missing; REVOKE the previously committed key
|
|
|
|
|
|
|
|
|
|
|
26 |
os.environ["GOOGLE_API_KEY"] = GEMINI_API_KEY
|
|
|
|
|
27 |
genai.configure(api_key=GEMINI_API_KEY)
|
28 |
|
|
|
29 |
# === Chat Storage ===
|
30 |
HISTORY_FILE = "chat_history.json"
|
31 |
|
|
|
41 |
|
42 |
sessions = load_all_sessions()
|
43 |
|
44 |
+
# === Gemini Chatbot ===
|
45 |
class GeminiChatbot:
|
46 |
def __init__(self):
|
47 |
self.setup_model()
|
|
|
82 |
self.app = workflow.compile(checkpointer=self.memory)
|
83 |
|
84 |
def get_response(self, user_message, history, thread_id):
|
|
|
|
|
85 |
try:
|
|
|
86 |
langchain_history = []
|
87 |
for user, bot in history:
|
88 |
langchain_history.append(HumanMessage(content=user))
|
|
|
92 |
full_history = langchain_history + [input_msg]
|
93 |
config = {"configurable": {"thread_id": thread_id}}
|
94 |
|
|
|
95 |
response = self.app.invoke({"messages": full_history}, config)
|
96 |
full_text = response["messages"][-1].content
|
97 |
|
|
|
105 |
logger.error(f"Response error: {e}")
|
106 |
yield f"⚠ Error: {type(e).__name__} — {str(e)}"
|
107 |
|
|
|
108 |
chatbot = GeminiChatbot()
|
109 |
|
110 |
# === Gradio UI ===
|
|
|
143 |
def get_dropdown_choices():
|
144 |
return [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [f"NEW: {current_thread_id.value}"]
|
145 |
|
|
|
146 |
new_chat_btn = gr.Button("New Chat", variant="primary")
|
147 |
session_selector = gr.Dropdown(
|
148 |
label="Chats",
|
|
|
159 |
|
160 |
clear = gr.Button("Clear Current Chat")
|
161 |
|
|
|
162 |
def start_new_chat():
|
163 |
new_id = str(uuid.uuid4())
|
164 |
sessions[new_id] = []
|
|
|
190 |
save_all_sessions(sessions)
|
191 |
return []
|
192 |
|
193 |
+
# Events
|
194 |
new_chat_btn.click(start_new_chat, outputs=[current_thread_id, chatbot_ui, session_selector, session_selector])
|
195 |
session_selector.change(switch_chat, inputs=session_selector, outputs=[current_thread_id, chatbot_ui, session_selector])
|
196 |
send.click(respond, [msg, chatbot_ui, current_thread_id], [chatbot_ui]).then(lambda: "", None, msg)
|