mgbam committed
Commit a1bb249 · verified · Parent(s): 0fe5206

Create app.py

Files changed (1): app.py (+301 -0)
app.py ADDED
@@ -0,0 +1,301 @@
# app.py
# Advanced Hugging Face Space – Multi-Agent Chatbot
#
# Developed with a forward-looking vision, inspired by Bill Gates’ drive for technological innovation.
# This app leverages LangGraph, DeepSeek-R1 (via text-based function calling), and Agentic RAG to deliver
# a multi-agent chatbot capable of autonomous reasoning and action.
#
# To deploy:
# 1. Add your API keys to the Hugging Face Space secrets (e.g., DEEPSEEK_API_KEY, plus OPENAI_API_KEY
#    for the embeddings used below).
# 2. Install dependencies via a requirements.txt file.
# 3. Enjoy a robust and scalable AI assistant for research and development.

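# A plausible requirements.txt for this Space is sketched below as a comment. The exact package set and
# versions are an assumption (they are not part of this commit) and should be adjusted to what you test:
#
#   streamlit
#   requests
#   typing-extensions
#   langchain
#   langchain-openai
#   langchain-community
#   langgraph
#   chromadb
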
import os
import re
import logging
import streamlit as st
import requests
from typing import Sequence
from typing_extensions import TypedDict, Annotated

from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.tools.retriever import create_retriever_tool
from langgraph.graph import END, StateGraph, START
from langgraph.prebuilt import ToolNode
from langgraph.graph.message import add_messages

# Configure logging for better observability
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# --- Dummy Data Setup ---
research_texts = [
    "Research Report: Results of a New AI Model Improving Image Recognition Accuracy to 98%",
    "Academic Paper Summary: Why Transformers Became the Mainstream Architecture in Natural Language Processing",
    "Latest Trends in Machine Learning Methods Using Quantum Computing"
]

development_texts = [
    "Project A: UI Design Completed, API Integration in Progress",
    "Project B: Testing New Feature X, Bug Fixes Needed",
    "Product Y: In the Performance Optimization Stage Before Release"
]

# --- Preprocessing & Embeddings ---
splitter = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=10)
research_docs = splitter.create_documents(research_texts)
development_docs = splitter.create_documents(development_texts)

# OpenAIEmbeddings reads the OPENAI_API_KEY environment variable by default.
embeddings = OpenAIEmbeddings(
    model="text-embedding-3-large"
)

research_vectorstore = Chroma.from_documents(
    documents=research_docs,
    embedding=embeddings,
    collection_name="research_collection"
)
development_vectorstore = Chroma.from_documents(
    documents=development_docs,
    embedding=embeddings,
    collection_name="development_collection"
)

research_retriever = research_vectorstore.as_retriever()
development_retriever = development_vectorstore.as_retriever()

research_tool = create_retriever_tool(
    research_retriever,
    "research_db_tool",
    "Search information from the research database."
)
development_tool = create_retriever_tool(
    development_retriever,
    "development_db_tool",
    "Search information from the development database."
)
tools = [research_tool, development_tool]

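# Minimal sketch of a manual sanity check for the retrievers (optional, not part of the original commit);
# calling .invoke() on a retriever returns a list of Document objects:
#
#     docs = research_retriever.invoke("image recognition accuracy")
#     print([d.page_content for d in docs])
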
# --- Agent and Workflow Functions ---
class AgentState(TypedDict):
    messages: Annotated[Sequence[AIMessage | HumanMessage | ToolMessage], add_messages]

def agent(state: AgentState):
    logger.info("Agent invoked")
    messages = state["messages"]
    user_message = messages[0][1] if isinstance(messages[0], tuple) else messages[0].content

    # Text-based function calling: instead of native tool calls, the model is instructed to reply
    # with a SEARCH_RESEARCH: or SEARCH_DEV: marker that this function parses.
    prompt = f"""Given this user question: "{user_message}"
If it's about research or academic topics, respond EXACTLY in this format:
SEARCH_RESEARCH: <search terms>

If it's about development status, respond EXACTLY in this format:
SEARCH_DEV: <search terms>

Otherwise, just answer directly.
"""
    headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {os.environ.get('DEEPSEEK_API_KEY')}",
        "Content-Type": "application/json"
    }
    data = {
        "model": "deepseek-chat",
        "messages": [{"role": "user", "content": prompt}],
        "temperature": 0.7,
        "max_tokens": 1024
    }

    response = requests.post(
        "https://api.deepseek.com/v1/chat/completions",
        headers=headers,
        json=data,
        verify=False
    )
    if response.status_code == 200:
        response_text = response.json()['choices'][0]['message']['content']
        logger.info(f"DeepSeek response: {response_text}")
        if "SEARCH_RESEARCH:" in response_text:
            query = response_text.split("SEARCH_RESEARCH:")[1].strip()
            results = research_retriever.invoke(query)
            return {"messages": [AIMessage(content=f'Action: research_db_tool\n{{"query": "{query}"}}\n\nResults: {str(results)}')]}
        elif "SEARCH_DEV:" in response_text:
            query = response_text.split("SEARCH_DEV:")[1].strip()
            results = development_retriever.invoke(query)
            return {"messages": [AIMessage(content=f'Action: development_db_tool\n{{"query": "{query}"}}\n\nResults: {str(results)}')]}
        else:
            return {"messages": [AIMessage(content=response_text)]}
    else:
        error_msg = f"DeepSeek API call failed: {response.text}"
        logger.error(error_msg)
        raise Exception(error_msg)

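# For illustration only (hypothetical values, not produced by this commit): given the question
# "What is the status of Project A?", agent() above expects the model to reply with a line such as
# "SEARCH_DEV: Project A status", and it then returns an AIMessage whose content starts with
# 'Action: development_db_tool' followed by the retrieved documents, which the grader below inspects.
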
def simple_grade_documents(state: AgentState):
    last_message = state["messages"][-1]
    logger.info(f"Grading message: {last_message.content}")
    if "Results: [Document" in last_message.content:
        return "generate"
    else:
        return "rewrite"

def generate(state: AgentState):
    logger.info("Generating final answer")
    messages = state["messages"]
    question = messages[0].content if not isinstance(messages[0], tuple) else messages[0][1]
    last_message = messages[-1]
    docs = ""
    if "Results: [" in last_message.content:
        docs = last_message.content[last_message.content.find("Results: ["):]
    headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {os.environ.get('DEEPSEEK_API_KEY')}",
        "Content-Type": "application/json"
    }
    prompt = f"""Based on these research documents, summarize the latest advancements in AI:
Question: {question}
Documents: {docs}
Focus on extracting and synthesizing the key findings from the research papers.
"""
    data = {
        "model": "deepseek-chat",
        "messages": [{"role": "user", "content": prompt}],
        "temperature": 0.7,
        "max_tokens": 1024
    }
    response = requests.post(
        "https://api.deepseek.com/v1/chat/completions",
        headers=headers,
        json=data,
        verify=False
    )
    if response.status_code == 200:
        response_text = response.json()['choices'][0]['message']['content']
        return {"messages": [AIMessage(content=response_text)]}
    else:
        error_msg = f"DeepSeek API generate call failed: {response.text}"
        logger.error(error_msg)
        raise Exception(error_msg)

def rewrite(state: AgentState):
    logger.info("Rewriting question")
    original_question = state["messages"][0].content if state["messages"] else "N/A"
    headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {os.environ.get('DEEPSEEK_API_KEY')}",
        "Content-Type": "application/json"
    }
    data = {
        "model": "deepseek-chat",
        "messages": [{"role": "user", "content": f"Rewrite this question to be more specific and clearer: {original_question}"}],
        "temperature": 0.7,
        "max_tokens": 1024
    }
    response = requests.post(
        "https://api.deepseek.com/v1/chat/completions",
        headers=headers,
        json=data,
        verify=False
    )
    if response.status_code == 200:
        response_text = response.json()['choices'][0]['message']['content']
        return {"messages": [AIMessage(content=response_text)]}
    else:
        error_msg = f"DeepSeek API rewrite call failed: {response.text}"
        logger.error(error_msg)
        raise Exception(error_msg)

tools_pattern = re.compile(r"Action: .*")

def custom_tools_condition(state: AgentState):
    last_message = state["messages"][-1]
    if tools_pattern.match(last_message.content):
        return "tools"
    return END

# Build the workflow using LangGraph's StateGraph
workflow = StateGraph(AgentState)
workflow.add_node("agent", agent)
retrieve_node = ToolNode(tools)
workflow.add_node("retrieve", retrieve_node)
workflow.add_node("rewrite", rewrite)
workflow.add_node("generate", generate)
workflow.add_edge(START, "agent")
workflow.add_conditional_edges("agent", custom_tools_condition, {"tools": "retrieve", END: END})
workflow.add_conditional_edges("retrieve", simple_grade_documents)
workflow.add_edge("generate", END)
workflow.add_edge("rewrite", "agent")
app_workflow = workflow.compile()

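# A minimal sketch of invoking the compiled graph outside Streamlit (an assumed smoke test, not part of
# the original commit; the question and thread_id below are placeholders):
#
#     for event in app_workflow.stream(
#         {"messages": [("user", "What is the status of Project A?")]},
#         {"configurable": {"thread_id": "smoke-test"}},
#     ):
#         print(event)
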
def process_question(user_question, app, config):
    events = []
    for event in app.stream({"messages": [("user", user_question)]}, config):
        events.append(event)
    return events

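# Each streamed event is a dict keyed by the node that just ran, e.g. {"agent": {"messages": [...]}} or
# {"generate": {"messages": [...]}}; the Streamlit UI below inspects these keys to decide what to display.
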
# --- Streamlit UI ---
def main():
    st.set_page_config(page_title="Advanced AI R&D Assistant", layout="wide", initial_sidebar_state="expanded")
    st.markdown(
        """
        <style>
        .stApp { background-color: #f8f9fa; }
        .stButton > button { width: 100%; margin-top: 20px; }
        .data-box { padding: 20px; border-radius: 10px; margin: 10px 0; }
        .research-box { background-color: #e3f2fd; border-left: 5px solid #1976d2; }
        .dev-box { background-color: #e8f5e9; border-left: 5px solid #43a047; }
        </style>
        """, unsafe_allow_html=True
    )

    # Sidebar: Display available data
    with st.sidebar:
        st.header("📚 Available Data")
        st.subheader("Research Database")
        for text in research_texts:
            st.markdown(f'<div class="data-box research-box">{text}</div>', unsafe_allow_html=True)
        st.subheader("Development Database")
        for text in development_texts:
            st.markdown(f'<div class="data-box dev-box">{text}</div>', unsafe_allow_html=True)

    st.title("🤖 Advanced AI R&D Assistant")
    st.markdown("---")
    query = st.text_area("Enter your question:", height=100, placeholder="e.g., What is the latest advancement in AI research?")

    col1, col2 = st.columns([1, 2])
    with col1:
        if st.button("🔍 Get Answer", use_container_width=True):
            if query:
                with st.spinner('Processing your question...'):
                    events = process_question(query, app_workflow, {"configurable": {"thread_id": "1"}})
                    for event in events:
                        if 'agent' in event:
                            with st.expander("🔄 Processing Step", expanded=True):
                                content = event['agent']['messages'][0].content
                                if "Results:" in content:
                                    st.markdown("### 📑 Retrieved Documents:")
                                    docs = content[content.find("Results:"):]
                                    st.info(docs)
                        elif 'generate' in event:
                            st.markdown("### ✨ Final Answer:")
                            st.success(event['generate']['messages'][0].content)
            else:
                st.warning("⚠️ Please enter a question first!")
    with col2:
        st.markdown(
            """
            ### 🎯 How to Use
            1. Type your question in the text box.
            2. Click "Get Answer" to process.
            3. View retrieved documents and the final answer.

            ### 💡 Example Questions
            - What are the latest advancements in AI research?
            - What is the status of Project A?
            - What are the current trends in machine learning?
            """
        )

if __name__ == "__main__":
    main()