mgbam committed on
Commit
1e0350f
·
verified ·
1 Parent(s): 1d3eda8

Update app.py

Files changed (1)
  1. app.py +285 -40
app.py CHANGED
@@ -1,50 +1,295 @@
  import streamlit as st

- def main():
-     st.set_page_config(
-         page_title="Enhanced Contrast Chatbot",
-         layout="wide",
-         initial_sidebar_state="expanded"
-     )

-     # Custom CSS to improve text visibility
      st.markdown("""
-     <style>
-     /* Force a white background for the main app area */
-     .stApp {
-         background-color: #ffffff !important;
-     }
-
-     /* Make text darker for better contrast */
-     html, body, [class^="css"] {
-         color: #111111 !important;
-     }
-
-     /* Adjust label text (like "Enter your question") */
-     .stTextArea label {
-         color: #111111 !important;
-     }
-
-     /* Make sure sidebar text is also dark */
-     .css-1v3fvcr {
-         color: #111111 !important;
-     }
-
-     /* Example: You can also adjust the background color of
-        your "data-box" classes if needed */
-     .data-box {
-         background-color: #f0f0f0 !important;
-         color: #111111 !important;
-     }
-     </style>
      """, unsafe_allow_html=True)

-     st.title("Enhanced Contrast Chatbot")
-     st.markdown("Try typing your question below to see if the text is clearer now:")

-     user_query = st.text_area("Enter your question here:")
-     if st.button("Submit"):
-         st.write("Your query:", user_query)

  if __name__ == "__main__":
      main()
 
+ # app.py
+ # Ultra-Dark Advanced AI R&D Assistant
+ #
+ # In the spirit of innovation and clarity, this app is built to be robust, scalable,
+ # and visually striking. It leverages LangGraph, DeepSeek-R1, and local Chroma for fast, in-memory vector storage.
+ #
+ # Before deploying, make sure you set the following environment variables:
+ # - DEEP_SEEK_API: Your DeepSeek API key.
+ # - OPENAI_API_KEY: Your OpenAI API key.
+ #
+ # Written with a vision for tomorrow - by someone who believes in building the future.
+
+ import os
+ import re
+ import logging
  import streamlit as st
+ import requests
+ from typing import Sequence
+ from typing_extensions import TypedDict, Annotated

+ # Imports for LangChain (ensure langchain-community is installed)
+ from langchain.embeddings.openai import OpenAIEmbeddings
+ from langchain.vectorstores import Chroma
+ from langchain.schema import HumanMessage, AIMessage
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.tools.retriever import create_retriever_tool
+
+ # Imports for LangGraph
+ from langgraph.graph import END, StateGraph, START
+ from langgraph.prebuilt import ToolNode
+ from langgraph.graph.message import add_messages
+
+ # Import Chroma settings for local storage
+ from chromadb.config import Settings
+
+ # Set up logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # --- Define our data ---
+ research_texts = [
+     "Research Report: Results of a New AI Model Improving Image Recognition Accuracy to 98%",
+     "Academic Paper Summary: Why Transformers Became the Mainstream Architecture in Natural Language Processing",
+     "Latest Trends in Machine Learning Methods Using Quantum Computing"
+ ]
+ development_texts = [
+     "Project A: UI Design Completed, API Integration in Progress",
+     "Project B: Testing New Feature X, Bug Fixes Needed",
+     "Product Y: In the Performance Optimization Stage Before Release"
+ ]
+
+ # --- Preprocess and create embeddings ---
+ splitter = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=10)
+ research_docs = splitter.create_documents(research_texts)
+ development_docs = splitter.create_documents(development_texts)
+
+ # Initialize embeddings with your OpenAI API key
+ embeddings = OpenAIEmbeddings(
+     model="text-embedding-3-large",
+     openai_api_key=os.environ.get("OPENAI_API_KEY")
+ )
+
+ # Use local in-memory settings to avoid tenant issues
+ client_settings = Settings(
+     chroma_api_impl="local",
+     persist_directory=None  # Set to a directory like ".chroma" if persistence is needed
+ )
+
+ research_vectorstore = Chroma.from_documents(
+     documents=research_docs,
+     embedding=embeddings,
+     collection_name="research_collection",
+     client_settings=client_settings
+ )
+ development_vectorstore = Chroma.from_documents(
+     documents=development_docs,
+     embedding=embeddings,
+     collection_name="development_collection",
+     client_settings=client_settings
+ )
+
+ research_retriever = research_vectorstore.as_retriever()
+ development_retriever = development_vectorstore.as_retriever()
+
+ research_tool = create_retriever_tool(
+     research_retriever,
+     "research_db_tool",
+     "Search information from the research database."
+ )
+ development_tool = create_retriever_tool(
+     development_retriever,
+     "development_db_tool",
+     "Search information from the development database."
+ )
+ tools = [research_tool, development_tool]
+
+ # --- Define our agent and workflow functions ---
+ class AgentState(TypedDict):
+     messages: Annotated[Sequence[AIMessage | HumanMessage], add_messages]

+ def agent(state: AgentState):
+     logger.info("Agent invoked")
+     messages = state["messages"]
+     user_message = messages[0][1] if isinstance(messages[0], tuple) else messages[0].content
+     prompt = f"""Given this user question: "{user_message}"
+ If it's about research or academic topics, respond EXACTLY in this format:
+ SEARCH_RESEARCH: <search terms>
+
+ If it's about development status, respond EXACTLY in this format:
+ SEARCH_DEV: <search terms>
+
+ Otherwise, just answer directly.
+ """
+     headers = {
+         "Accept": "application/json",
+         "Authorization": f"Bearer {os.environ.get('DEEP_SEEK_API')}",
+         "Content-Type": "application/json"
+     }
+     data = {
+         "model": "deepseek-chat",
+         "messages": [{"role": "user", "content": prompt}],
+         "temperature": 0.7,
+         "max_tokens": 1024
+     }
+     response = requests.post("https://api.deepseek.com/v1/chat/completions",
+                              headers=headers, json=data, verify=False)
+     if response.status_code == 200:
+         response_text = response.json()['choices'][0]['message']['content']
+         logger.info(f"DeepSeek response: {response_text}")
+         if "SEARCH_RESEARCH:" in response_text:
+             query = response_text.split("SEARCH_RESEARCH:")[1].strip()
+             results = research_retriever.invoke(query)
+             return {"messages": [AIMessage(content=f'Action: research_db_tool\n{{"query": "{query}"}}\n\nResults: {str(results)}')]}
+         elif "SEARCH_DEV:" in response_text:
+             query = response_text.split("SEARCH_DEV:")[1].strip()
+             results = development_retriever.invoke(query)
+             return {"messages": [AIMessage(content=f'Action: development_db_tool\n{{"query": "{query}"}}\n\nResults: {str(results)}')]}
+         else:
+             return {"messages": [AIMessage(content=response_text)]}
+     else:
+         error_msg = f"DeepSeek API call failed: {response.text}"
+         logger.error(error_msg)
+         raise Exception(error_msg)
+
+ def simple_grade_documents(state: AgentState):
+     last_message = state["messages"][-1]
+     logger.info(f"Grading message: {last_message.content}")
+     return "generate" if "Results: [Document" in last_message.content else "rewrite"
+
+ def generate(state: AgentState):
+     logger.info("Generating final answer")
+     messages = state["messages"]
+     question = messages[0].content if not isinstance(messages[0], tuple) else messages[0][1]
+     last_message = messages[-1]
+     docs = last_message.content[last_message.content.find("Results: ["):] if "Results: [" in last_message.content else ""
+     headers = {
+         "Accept": "application/json",
+         "Authorization": f"Bearer {os.environ.get('DEEP_SEEK_API')}",
+         "Content-Type": "application/json"
+     }
+     prompt = f"""Based on these research documents, summarize the latest advancements in AI:
+ Question: {question}
+ Documents: {docs}
+ Focus on extracting and synthesizing the key findings from the research papers.
+ """
+     data = {
+         "model": "deepseek-chat",
+         "messages": [{"role": "user", "content": prompt}],
+         "temperature": 0.7,
+         "max_tokens": 1024
+     }
+     response = requests.post("https://api.deepseek.com/v1/chat/completions",
+                              headers=headers, json=data, verify=False)
+     if response.status_code == 200:
+         response_text = response.json()['choices'][0]['message']['content']
+         return {"messages": [AIMessage(content=response_text)]}
+     else:
+         error_msg = f"DeepSeek API generate call failed: {response.text}"
+         logger.error(error_msg)
+         raise Exception(error_msg)
+
+ def rewrite(state: AgentState):
+     logger.info("Rewriting question")
+     original_question = state["messages"][0].content if state["messages"] else "N/A"
+     headers = {
+         "Accept": "application/json",
+         "Authorization": f"Bearer {os.environ.get('DEEP_SEEK_API')}",
+         "Content-Type": "application/json"
+     }
+     data = {
+         "model": "deepseek-chat",
+         "messages": [{"role": "user", "content": f"Rewrite this question to be more specific and clearer: {original_question}"}],
+         "temperature": 0.7,
+         "max_tokens": 1024
+     }
+     response = requests.post("https://api.deepseek.com/v1/chat/completions",
+                              headers=headers, json=data, verify=False)
+     if response.status_code == 200:
+         response_text = response.json()['choices'][0]['message']['content']
+         return {"messages": [AIMessage(content=response_text)]}
+     else:
+         error_msg = f"DeepSeek API rewrite call failed: {response.text}"
+         logger.error(error_msg)
+         raise Exception(error_msg)
+
+ tools_pattern = re.compile(r"Action: .*")
+ def custom_tools_condition(state: AgentState):
+     last_message = state["messages"][-1]
+     return "tools" if tools_pattern.match(last_message.content) else END
+
+ # Build the workflow using LangGraph's StateGraph
+ workflow = StateGraph(AgentState)
+ workflow.add_node("agent", agent)
+ retrieve_node = ToolNode(tools)
+ workflow.add_node("retrieve", retrieve_node)
+ workflow.add_node("rewrite", rewrite)
+ workflow.add_node("generate", generate)
+ workflow.add_edge(START, "agent")
+ workflow.add_conditional_edges("agent", custom_tools_condition, {"tools": "retrieve", END: END})
+ workflow.add_conditional_edges("retrieve", simple_grade_documents)
+ workflow.add_edge("generate", END)
+ workflow.add_edge("rewrite", "agent")
+ app_workflow = workflow.compile()
+
+ def process_question(user_question, app, config):
+     events = []
+     for event in app.stream({"messages": [("user", user_question)]}, config):
+         events.append(event)
+     return events
+
+ # --- Streamlit UI with Ultra-Dark Theme ---
+ def main():
+     st.set_page_config(page_title="Ultra-Dark AI R&D Assistant", layout="wide", initial_sidebar_state="expanded")
      st.markdown("""
+     <style>
+     /* Ultra-dark background for the app */
+     .stApp { background-color: #121212 !important; }
+     /* Force text to be light for maximum contrast */
+     html, body, [class*="css"] { color: #e0e0e0 !important; }
+     /* Override default Streamlit input labels and other text */
+     .stTextArea label, .stMarkdown, .stHeader, .stTitle { color: #ffffff !important; }
+     /* Sidebar styling */
+     .css-1d391kg { background-color: #1f1f1f !important; }
+     .data-box { background-color: #1e1e1e !important; color: #e0e0e0 !important; padding: 20px; border-radius: 10px; margin: 10px 0; }
+     </style>
      """, unsafe_allow_html=True)

+     # Sidebar: Display available data with dark styling
+     with st.sidebar:
+         st.header("📚 Available Data")
+         st.subheader("Research Database")
+         for text in research_texts:
+             st.markdown(f'<div class="data-box">{text}</div>', unsafe_allow_html=True)
+         st.subheader("Development Database")
+         for text in development_texts:
+             st.markdown(f'<div class="data-box">{text}</div>', unsafe_allow_html=True)
+
+     st.title("🤖 Ultra-Dark AI R&D Assistant")
+     st.markdown("---")
+     query = st.text_area("Enter your question:", height=100, placeholder="e.g., What are the latest advancements in AI research?")

+     col1, col2 = st.columns([1, 2])
+     with col1:
+         if st.button("🔍 Get Answer", use_container_width=True):
+             if query:
+                 with st.spinner('Processing your question...'):
+                     events = process_question(query, app_workflow, {"configurable": {"thread_id": "1"}})
+                     for event in events:
+                         if 'agent' in event:
+                             with st.expander("🔄 Processing Step", expanded=True):
+                                 content = event['agent']['messages'][0].content
+                                 if "Results:" in content:
+                                     st.markdown("### 📑 Retrieved Documents:")
+                                     docs = content[content.find("Results:"):]
+                                     st.info(docs)
+                         elif 'generate' in event:
+                             st.markdown("### ✨ Final Answer:")
+                             st.success(event['generate']['messages'][0].content)
+             else:
+                 st.warning("⚠️ Please enter a question first!")
+     with col2:
+         st.markdown("""
+         ### 🎯 How to Use
+         1. Type your question in the text box.
+         2. Click "Get Answer" to process.
+         3. View retrieved documents and the final answer.
+
+         ### 💡 Example Questions
+         - What are the latest advancements in AI research?
+         - What is the status of Project A?
+         - What are the current trends in machine learning?
+         """)

  if __name__ == "__main__":
      main()
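
A quick way to sanity-check the committed workflow without the Streamlit UI is to drive the compiled graph directly. The following is a minimal sketch, not part of this commit; it assumes app.py is importable from the working directory and that the DEEP_SEEK_API and OPENAI_API_KEY environment variables noted in the file header are already set, since importing the module builds the Chroma collections and the agent calls the DeepSeek API.

    # Hypothetical smoke test for the committed workflow (not part of this commit).
    # Assumes DEEP_SEEK_API and OPENAI_API_KEY are exported in the environment.
    from app import app_workflow, process_question

    events = process_question(
        "What is the status of Project A?",     # routed to SEARCH_DEV by the agent prompt
        app_workflow,
        {"configurable": {"thread_id": "1"}},   # same config the Streamlit UI passes
    )
    for event in events:
        print(event)  # each event maps a node name ("agent", "retrieve", ...) to its messages

Printing the streamed events shows the same node-by-node progression the UI renders in its "Processing Step" expanders, which makes it easier to verify the conditional routing ("tools" vs END, then "generate" vs "rewrite") before deploying.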