Rahul-8799 committed
Commit e98599b · verified · 1 Parent(s): e26200a

Update utils/langgraph_pipeline.py

Files changed (1):
  1. utils/langgraph_pipeline.py +53 -36
utils/langgraph_pipeline.py CHANGED
@@ -1,5 +1,14 @@
+# MAC/utils/langgraph_pipeline.py
+
+from typing import TypedDict, List
+
+from langchain_core.messages import HumanMessage, AIMessage
+from langchain_core.messages.base import BaseMessage
+from langchain_core.tools.structured import StructuredTool
+
 from langgraph.graph import StateGraph, END
 from langgraph.prebuilt import ToolNode
+
 from agents import (
     product_manager_agent,
     project_manager_agent,
@@ -7,16 +16,14 @@ from agents import (
     software_engineer_agent,
     quality_assurance_agent,
 )
-from langchain_core.messages import HumanMessage, AIMessage
-from langchain_core.messages.base import BaseMessage
-from typing import TypedDict, List
-

+# ----------------
+# 1) State types
+# ----------------
 class InputState(TypedDict):
     messages: List[BaseMessage]
     chat_log: list

-
 class OutputState(TypedDict):
     pm_output: str
     proj_output: str
@@ -25,20 +32,25 @@ class OutputState(TypedDict):
     qa_output: str
     chat_log: list

+# ----------------
+# 2) Bridge → ProductManager
+# ----------------
+def bridge_to_product_manager(state: dict) -> dict:
+    """
+    Convert the last HumanMessage into a structured system AIMessage
+    that the Product Manager agent can consume.
+    """
+    msgs = state.get("messages")
+    logs = state.get("chat_log", [])
+    if not isinstance(msgs, list) or not msgs or not isinstance(msgs[-1], HumanMessage):
+        raise ValueError("Expected state['messages'] to be a non-empty list ending in a HumanMessage")

-# ✅ Fixed Bridge node (must return a proper state with messages and chat_log)
-def bridge_to_pm(state: dict) -> dict:
-    messages = state.get("messages", [])
-    chat_log = state.get("chat_log", [])
-
-    if not messages or not isinstance(messages[-1], HumanMessage):
-        raise ValueError("Expected last message to be a HumanMessage")
-
-    user_prompt = messages[-1].content
+    user_text = msgs[-1].content
+    spec = f"""# Stakeholder Prompt

-    structured_context = f"""# Stakeholder Prompt
+A new product request has been submitted:

-"{user_prompt}"
+"{user_text}"

 Please convert this into a structured product specification including:
 - Goals
@@ -46,26 +58,32 @@ Please convert this into a structured product specification including:
 - User Stories
 - Success Metrics
 """
-
-    new_ai_msg = AIMessage(content=structured_context)
-
+    ai = AIMessage(content=spec)
     return {
-        "messages": messages + [new_ai_msg],  # Important: last item must be AIMessage
-        "chat_log": chat_log + [{"role": "System", "content": structured_context}],
+        "messages": msgs + [ai],
+        "chat_log": logs + [{"role": "System", "content": spec}],
     }

+# Create a StructuredTool (with explicit description) for the bridge
+bridge_tool = StructuredTool.from_function(
+    func=bridge_to_product_manager,
+    name="bridge_to_product_manager",
+    description="Generate a structured AIMessage from a HumanMessage for the Product Manager agent."
+)
+bridge_node = ToolNode([bridge_tool])

-# ✅ Wrap bridge node in a ToolNode
-bridge_node = ToolNode([bridge_to_pm])
-
-# ✅ Other agents as ToolNodes
-pm_node = ToolNode([product_manager_agent.run])
+# ----------------
+# 3) Wrap your LLM agents
+# ----------------
+pm_node = ToolNode([product_manager_agent.run])
 proj_node = ToolNode([project_manager_agent.run])
 arch_node = ToolNode([software_architect_agent.run])
-dev_node = ToolNode([software_engineer_agent.run])
-qa_node = ToolNode([quality_assurance_agent.run])
+dev_node = ToolNode([software_engineer_agent.run])
+qa_node = ToolNode([quality_assurance_agent.run])

-# ✅ Build LangGraph
+# ----------------
+# 4) Build & compile the graph
+# ----------------
 graph = StateGraph(input=InputState, output=OutputState)

 graph.add_node("Bridge", bridge_node)
@@ -85,16 +103,15 @@ graph.add_edge("QualityAssurance", END)

 compiled_graph = graph.compile()

-
-# ✅ Run function
+# ----------------
+# 5) Pipeline entrypoint
+# ----------------
 def run_pipeline_and_save(prompt: str):
     initial_state = {
         "messages": [HumanMessage(content=prompt)],
         "chat_log": [],
     }
-
-    # Sanity check
-    assert isinstance(initial_state["messages"][-1], HumanMessage)
-
+    # this invoke will now see:
+    # Bridge → PM → ProjectManager → Architect → Engineer → QA → END
     final_state = compiled_graph.invoke(initial_state)
-    return final_state["chat_log"], final_state["qa_output"]
+    return final_state["chat_log"], final_state["qa_output"]
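
For reference, a minimal usage sketch of the updated module follows. It is an illustration under assumptions rather than part of the commit: it presumes the repo's utils and agents packages are importable, that langchain_core is installed, and the prompt strings are invented.

# Sketch only: assumes utils.langgraph_pipeline and its agents dependency are
# importable, langchain_core is installed, and the prompt text is made up.
from langchain_core.messages import HumanMessage

from utils.langgraph_pipeline import bridge_to_product_manager, run_pipeline_and_save

# The bridge converts the stakeholder's HumanMessage into a structured spec
# (an AIMessage) and records it in chat_log under the "System" role.
state = {"messages": [HumanMessage(content="Add dark mode to the dashboard")], "chat_log": []}
out = bridge_to_product_manager(state)
assert out["messages"][-1].content.startswith("# Stakeholder Prompt")
assert out["chat_log"][-1]["role"] == "System"

# Full run: Bridge -> ProductManager -> ProjectManager -> Architect -> Engineer -> QA -> END.
chat_log, qa_output = run_pipeline_and_save("Build a web dashboard that tracks daily active users.")
print(qa_output)

The assertions mirror the bridge's contract as written in this commit: the incoming state must end in a HumanMessage, and the bridge appends an AIMessage plus a "System" chat_log entry.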