Update app.py
app.py CHANGED

@@ -8,7 +8,7 @@ from langgraph.prebuilt import ToolNode
 from langgraph.graph import START, StateGraph
 from langgraph.prebuilt import tools_condition
 from langgraph.checkpoint.memory import MemorySaver
-from tools import search_tool,
+from tools import search_tool, hub_stats_tool, weather_info_tool
 from retriever import guest_info_tool
 import gradio as gr
 
@@ -19,14 +19,13 @@ from langchain_google_genai import ChatGoogleGenerativeAI
 #llm = ChatGroq(model="qwen-2.5-coder-32b")
 llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash")
 
-tools = [search_tool,
+tools = [search_tool, hub_stats_tool, guest_info_tool, weather_info_tool]
 chat_with_tools = llm.bind_tools(tools)
 
 # System message
 sys_msg = SystemMessage(content="""
 Role:
 You are a helpful agent and hosting a party.
-
 STRICT RULES:
 1. Follow a THINK → TOOL → THINK → RESPOND approach:
    - THINK: Analyze the request and decide if any tool call is required or if it can be answered without a tool.
@@ -41,9 +40,9 @@ class AgentState(TypedDict):
     messages: Annotated[list[AnyMessage], add_messages]
 
 def assistant(state: AgentState):
-
-"messages":
-}
+    if len(state["messages"]) > 7:
+        return {"messages": chat_with_tools.invoke([sys_msg] + state["messages"][-6:])}
+    return {"messages": chat_with_tools.invoke([sys_msg] + state["messages"])}
 
 ## The graph
 builder = StateGraph(AgentState)
@@ -78,14 +77,12 @@ def alfred_response(question):
 
 # Gradio
 
-input_textbox = gr.Textbox(label="Type your query here:", placeholder="Hi", lines=5)
-output_textbox = gr.Textbox(label="Type your query here:", placeholder="Hi", lines=5)
 gr.Interface(
     fn=alfred_response,
     inputs="text",
     outputs="text",
-    title="Party Organizer
+    title="Party Organizer Assistant",
     description="Helps you answer with different asks during Party",
     examples=[["Whats weather now in Bangalore?"], ["The weather in Bangalore is Rainy with a temperature of 15°C."]],
     live=True
-).launch(
+).launch()
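
The hunks above cover the tool list, the assistant node (which now trims the history to the system prompt plus the last six messages once more than seven have accumulated), and the Gradio call, but not the graph construction itself. Given the imports in context (START, StateGraph, tools_condition, MemorySaver, ToolNode), the builder section of app.py plausibly looks like the sketch below; the node names, edge layout, and checkpointer hookup are assumptions, not lines taken from this diff.

# Sketch of the graph-wiring section not shown in the hunks, assuming the
# usual assistant -> tools -> assistant loop; names other than those visible
# in the diff (AgentState, assistant, tools) are guesses.
from langgraph.graph import START, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.checkpoint.memory import MemorySaver

builder = StateGraph(AgentState)
builder.add_node("assistant", assistant)          # assistant() from the hunk above
builder.add_node("tools", ToolNode(tools))        # same tools list bound to the LLM
builder.add_edge(START, "assistant")
builder.add_conditional_edges("assistant", tools_condition)  # route to "tools" or end
builder.add_edge("tools", "assistant")

memory = MemorySaver()                            # in-memory checkpointer imported at the top
alfred = builder.compile(checkpointer=memory)     # the compiled-graph name is an assumption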
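alfred_response(question) only appears in a hunk header, so its body sits outside this diff. With a MemorySaver checkpointer compiled in, a typical body invokes the compiled graph under a fixed thread_id and hands the final message back to Gradio; the version below is a hedged guess along those lines, and the thread_id value and graph name are made up.

from langchain_core.messages import HumanMessage

def alfred_response(question):
    # A checkpointer needs a thread_id so successive turns share one conversation state.
    config = {"configurable": {"thread_id": "party-host"}}   # hypothetical value
    result = alfred.invoke({"messages": [HumanMessage(content=question)]}, config)
    return result["messages"][-1].content                    # text shown in the Gradio output box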
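The tools module itself is not part of this commit, so the real signatures of search_tool, hub_stats_tool and weather_info_tool are unknown here. Purely for orientation, a dummy weather tool in the style hinted at by the example answer in the interface ("Rainy with a temperature of 15°C") could look like the sketch below; it is an illustration, not the actual tools.py.

from langchain_core.tools import tool
import random

@tool
def weather_info_tool(location: str) -> str:
    """Fetches dummy weather information for a given location."""
    # Hypothetical canned data; the real tool may call an external API instead.
    conditions = [
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
    ]
    data = random.choice(conditions)
    return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"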