DarkRodry committed
Commit 3ee0dc2 · Parent: c547a9c

add memory

Files changed (1)
app.py +14 -4
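This commit wires a MemorySaver checkpointer into the LangGraph build: the graph is now compiled with checkpointer=memory, and both invoke calls share a thread_id, so the second question ("What projects is she currently working on?") can resolve "she" from the first turn. A standalone sketch of the pattern follows the diff.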
app.py CHANGED
@@ -8,6 +8,7 @@ from langgraph.prebuilt import ToolNode
 from langgraph.graph import START, StateGraph
 from langgraph.prebuilt import tools_condition
 from langchain.chat_models import init_chat_model
+from langgraph.checkpoint.memory import MemorySaver
 
 from tools import *
 
@@ -16,7 +17,6 @@ if not os.environ.get("GOOGLE_API_KEY"):
 
 # Generate the chat interface, including the tools
 chat = init_chat_model("gemini-2.5-flash", model_provider="google_genai")
-
 tools = [guest_info_tool, weather_info_tool, hub_stats_tool, search_tool]
 chat_with_tools = chat.bind_tools(tools)
 
@@ -32,6 +32,8 @@ def assistant(state: AgentState):
 ## The graph
 builder = StateGraph(AgentState)
 
+memory = MemorySaver()
+
 # Define nodes: these do the work
 builder.add_node("assistant", assistant)
 builder.add_node("tools", ToolNode(tools))
@@ -45,10 +47,18 @@ builder.add_conditional_edges(
     tools_condition,
 )
 builder.add_edge("tools", "assistant")
-alfred = builder.compile()
+alfred = builder.compile(checkpointer=memory)
+
+config = {"configurable": {"thread_id": "1"}}
+# First interaction
+response = alfred.invoke({"messages": [HumanMessage(content="Tell me about 'Lady Ada Lovelace'. What's her background and how is she related to me?")]}, config=config)
+
+print("🎩 Alfred's Response:")
+print(response['messages'][-1].content)
+print()
 
-messages = [HumanMessage(content="Who is Facebook and what's their most popular model?")]
-response = alfred.invoke({"messages": messages})
+# Second interaction (referencing the first)
+response = alfred.invoke({"messages": [HumanMessage(content="What projects is she currently working on?")]}, config=config)
 
 print("🎩 Alfred's Response:")
 print(response['messages'][-1].content)
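Below is a minimal, self-contained sketch of the checkpointing pattern the diff adopts. It uses only public LangGraph APIs (StateGraph, MemorySaver, add_messages); the echo-style assistant node, the State class, and the "demo" thread id are placeholders for illustration, not code from this Space.

# Minimal sketch (assumes langgraph and langchain-core are installed); the
# "assistant" node is a placeholder echo, not the tool-calling model in app.py.
from typing import Annotated, TypedDict

from langchain_core.messages import AIMessage, HumanMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages


class State(TypedDict):
    # add_messages appends each turn instead of overwriting the list
    messages: Annotated[list, add_messages]


def assistant(state: State):
    # Stand-in for the real model call: reports how many messages it can see
    return {"messages": [AIMessage(content=f"I can see {len(state['messages'])} message(s).")]}


builder = StateGraph(State)
builder.add_node("assistant", assistant)
builder.add_edge(START, "assistant")
builder.add_edge("assistant", END)

# The checkpointer is what makes state survive across invoke() calls
graph = builder.compile(checkpointer=MemorySaver())

# All calls that share this thread_id read and write the same conversation state
config = {"configurable": {"thread_id": "demo"}}
graph.invoke({"messages": [HumanMessage(content="Hello")]}, config=config)
out = graph.invoke({"messages": [HumanMessage(content="Do you remember me?")]}, config=config)
print(out["messages"][-1].content)  # sees 3 messages: both questions plus the first reply

Because MemorySaver holds checkpoints in process memory, the remembered conversation is scoped by thread_id and by the lifetime of the process: restarting the Space clears it, and invoking with a different thread_id starts from an empty history.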