ArturG9 committed on
Commit 8b2e6a2 · verified · 1 Parent(s): 122615a

Update functions.py

Files changed (1)
  1. functions.py +30 -11
functions.py CHANGED
@@ -26,25 +26,44 @@ from langgraph.graph import START, END, StateGraph
 
 
 
-async def predict_custom_agent_answer(example: dict):
+async def handle_userinput(user_question, custom_graph):
+    # Add the user's question to the chat history and display it in the UI
+    st.session_state.messages.append({"role": "user", "content": user_question})
+    st.chat_message("user").write(user_question)
+
+    # Generate a unique thread ID for the graph's state
     config = {"configurable": {"thread_id": str(uuid.uuid4())}}
-
+
     try:
         # Invoke the custom graph with the input question
-        state_dict = await custom_graph.ainvoke(
-            {"question": example["input"], "steps": []}, config
+        state_dict = await custom_graph.ainvoke(
+            {"question": user_question, "steps": []}, config
         )
+
+        # Retrieve the documents from the graph's state (if available)
+        docs = state_dict.get("documents", [])
 
-        # Check if 'generation' exists and is not empty
+        # Display the retrieved documents in the sidebar
+        with st.sidebar:
+            st.subheader("Your documents")
+            with st.spinner("Processing"):
+                for doc in docs:
+                    st.write(f"Document: {doc.page_content}")  # Assuming doc.page_content contains the text
+
+        # Check if a response (generation) was produced by the graph
         if 'generation' in state_dict and state_dict['generation']:
-            return {"response": state_dict["generation"], "steps": state_dict["steps"]}
+            response = state_dict["generation"]
+
+            # Add the assistant's response to the chat history and display it
+            st.session_state.messages.append({"role": "assistant", "content": response})
+            st.chat_message("assistant").write(response)
         else:
-            # Raise an exception if the generation is missing or empty
-            print("Your question violates toxicity rules or contains sensitive information.")
-
+            # Handle cases where no valid generation is present
+            st.chat_message("assistant").write("Your question violates toxicity rules or contains sensitive information.")
+
     except Exception as e:
-        # Raise the exception with a custom message
-        print("An error occurred: Try to change the question.")
+        # Display an error message in case of failure
+        st.chat_message("assistant").write("An error occurred: Try to change the question.")
 
 
 
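
For reference, a minimal sketch of how the updated handle_userinput could be driven from a Streamlit chat page. It assumes functions.py exposes handle_userinput as changed above; create_custom_graph is a hypothetical stand-in for however the compiled LangGraph graph is actually built in this repo, and st.session_state.messages is initialized up front because handle_userinput appends to it.

# Usage sketch (not part of this commit).
import asyncio

import streamlit as st

from functions import handle_userinput

st.title("Document chat")

# handle_userinput appends to st.session_state.messages, so it must exist first.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay earlier turns so the conversation survives Streamlit reruns.
for message in st.session_state.messages:
    st.chat_message(message["role"]).write(message["content"])

# Build (or reuse) the compiled graph once per session.
if "custom_graph" not in st.session_state:
    st.session_state.custom_graph = create_custom_graph()  # hypothetical graph factory

user_question = st.chat_input("Ask a question about your documents")
if user_question:
    # handle_userinput is async, so run it to completion within this script run.
    asyncio.run(handle_userinput(user_question, st.session_state.custom_graph))

asyncio.run is used because a Streamlit script normally executes without a running event loop; if the app already runs inside one, the call would need to be adapted.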