mgbam committed
Commit b54d7d2 · verified · 1 Parent(s): 3f9045d

Update app.py

Files changed (1)
  1. app.py +13 -16
app.py CHANGED
@@ -1,8 +1,8 @@
- from langchain_openai import OpenAIEmbeddings # Updated import path
- from langchain.vectorstores import Chroma
- from langchain.schema import HumanMessage, AIMessage
- from langchain.text_splitter import RecursiveCharacterTextSplitter
- from langgraph.graph import END, StateGraph, START
+ from langchain_openai import OpenAIEmbeddings
+ from langchain_community.vectorstores import Chroma
+ from langchain_core.messages import HumanMessage, AIMessage
+ from langchain_text_splitters import RecursiveCharacterTextSplitter
+ from langgraph import StateGraph, END, START
  from langgraph.prebuilt import ToolNode
  from langgraph.graph.message import add_messages
  from typing_extensions import TypedDict, Annotated
@@ -37,8 +37,8 @@ research_docs = splitter.create_documents(research_texts)
  development_docs = splitter.create_documents(development_texts)

  embeddings = OpenAIEmbeddings(
-     model="text-embedding-3-large"
-     # dimensions=1024 # Uncomment if needed
+     model="text-embedding-3-large",
+     dimensions=1024
  )

  research_vectorstore = Chroma.from_documents(
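
A minimal sketch of how the updated embeddings block is expected to plug into the Chroma store referenced in this hunk; the splitter settings, placeholder text, and collection name are assumptions, only the OpenAIEmbeddings arguments and the Chroma.from_documents call come from the diff:

from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Assumed splitter settings and placeholder text; the real file builds
# research_docs/development_docs from its own corpora.
splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
research_docs = splitter.create_documents(["placeholder research text"])

embeddings = OpenAIEmbeddings(
    model="text-embedding-3-large",
    dimensions=1024,  # text-embedding-3-large supports shortened output vectors
)

research_vectorstore = Chroma.from_documents(
    documents=research_docs,
    embedding=embeddings,
    collection_name="research_collection",  # assumed name, not shown in the hunk
)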
@@ -110,8 +110,7 @@ Otherwise, just answer directly.
  response = requests.post(
      "https://api.deepseek.com/v1/chat/completions",
      headers=headers,
-     json=data,
-     verify=False
+     json=data
  )

  if response.status_code == 200:
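
The same change (dropping verify=False so TLS certificate verification stays at requests' default) repeats in the next two hunks. A minimal sketch of the call pattern, where the payload fields, model id, secret name, and response parsing are assumptions; only the endpoint, headers, json=data, and the status check come from the diff:

import os
import requests

headers = {
    "Authorization": f"Bearer {os.environ['DEEPSEEK_API_KEY']}",  # assumed secret name
    "Content-Type": "application/json",
}
data = {
    "model": "deepseek-chat",  # assumed model id
    "messages": [{"role": "user", "content": "Summarize the research findings."}],
}

response = requests.post(
    "https://api.deepseek.com/v1/chat/completions",
    headers=headers,
    json=data,  # certificate verification left enabled (the default)
)

if response.status_code == 200:
    reply = response.json()["choices"][0]["message"]["content"]  # assumed response shape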
@@ -186,8 +185,7 @@ Focus on extracting and synthesizing the key findings from the research papers.
  response = requests.post(
      "https://api.deepseek.com/v1/chat/completions",
      headers=headers,
-     json=data,
-     verify=False
+     json=data
  )

  if response.status_code == 200:
@@ -221,8 +219,7 @@ def rewrite(state: AgentState):
  response = requests.post(
      "https://api.deepseek.com/v1/chat/completions",
      headers=headers,
-     json=data,
-     verify=False
+     json=data
  )

  if response.status_code == 200:
@@ -273,9 +270,9 @@ app = workflow.compile()
  # --------------------------
  # Process Question Function
  # --------------------------
- def process_question(user_question, app, config):
+ def process_question(user_question):
      events = []
-     for event in app.stream({"messages": [("user", user_question)]}, config):
+     for event in app.stream({"messages": [("user", user_question)]}):
          events.append(event)
      return events

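A minimal sketch of why the app and config parameters can be dropped: the compiled graph lives at module scope, and stream() only needs a {"configurable": {"thread_id": ...}} config when the graph is compiled with a checkpointer. The toy graph below is a stand-in for the file's actual workflow; node names and the placeholder reply are assumptions.

from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages

class AgentState(TypedDict):
    messages: Annotated[list, add_messages]

def agent(state: AgentState):
    # Stand-in node; the real file calls the DeepSeek API here.
    return {"messages": [("assistant", "placeholder reply")]}

workflow = StateGraph(AgentState)
workflow.add_node("agent", agent)
workflow.add_edge(START, "agent")
workflow.add_edge("agent", END)
app = workflow.compile()  # no checkpointer, so no thread_id config is required

def process_question(user_question):
    events = []
    for event in app.stream({"messages": [("user", user_question)]}):
        events.append(event)  # each event is keyed by node name, e.g. {"agent": {...}}
    return events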
@@ -319,7 +316,7 @@ def main():
      if st.button("🔍 Get Answer", use_container_width=True):
          if query:
              with st.spinner('Processing your question...'):
-                 events = process_question(query, app, {"configurable": {"thread_id": "1"}})
+                 events = process_question(query)
                  for event in events:
                      if 'agent' in event:
                          with st.expander("🔄 Processing Step", expanded=True):
 