# App Section
import os
from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage, SystemMessage
from langgraph.prebuilt import ToolNode
from langgraph.graph import START, StateGraph
from langgraph.prebuilt import tools_condition
from langgraph.checkpoint.memory import MemorySaver
from tools import search_tool, hub_stats_tool, weather_info_tool
from retriever import guest_info_tool
import gradio as gr
from langchain_google_genai import ChatGoogleGenerativeAI
# Set up the chat model and bind the tools it may call
#llm = ChatGroq(model="qwen-2.5-coder-32b")
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash")
tools = [search_tool, hub_stats_tool, guest_info_tool, weather_info_tool]
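# bind_tools registers each tool's schema with the model so it can emit structured tool calls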
chat_with_tools = llm.bind_tools(tools)
# System message
sys_msg = SystemMessage(content="""
Role:
You are a helpful agent hosting a party.
STRICT RULES:
1. Follow a THINK → TOOL → THINK → RESPOND approach:
- THINK: Analyze the request and decide if any tool call is required or if it can be answered without a tool.
- TOOL: Perform only the necessary tool calls and collect responses.
- THINK: Re-evaluate tool response and determine the next step.
- RESPOND: Repeat the THINK/TOOL steps as many times as required, then provide the final answer.
2. If no relevant tool exists, inform the user and provide guidance instead of making assumptions.
""")
# Define the AgentState and the agent graph
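# The add_messages reducer appends new messages to the existing history instead of replacing it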
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]
def assistant(state: AgentState):
    # Keep the context window small: once the history exceeds 7 messages,
    # send only the system prompt plus the 6 most recent messages to the model.
    if len(state["messages"]) > 7:
        return {"messages": chat_with_tools.invoke([sys_msg] + state["messages"][-6:])}
    return {"messages": chat_with_tools.invoke([sys_msg] + state["messages"])}
## The graph
builder = StateGraph(AgentState)
# Define nodes: these do the work
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))
# Define edges: these determine how the control flow moves
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message requires a tool, route to tools
    # Otherwise, provide a direct response
    tools_condition,
)
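# After a tool runs, control returns to the assistant so it can reason over the result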
builder.add_edge("tools", "assistant")
memory = MemorySaver()
alfred = builder.compile(checkpointer=memory)
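# A fixed thread_id means every request shares the same conversation history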
config = {"configurable": {"thread_id": "7"}}
#alfred
def alfred_response(question):
    # Wrap the question as a HumanMessage, run the graph, and return the final reply
    messages = [HumanMessage(content=question)]
    response = alfred.invoke({"messages": messages}, config)
    return response['messages'][-1].content
#print("🎩 Alfred's Response:")
#print(response['messages'][-1].content)
# Gradio
gr.Interface(
    fn=alfred_response,
    inputs="text",
    outputs="text",
    title="Party Organizer Assistant",
    description="Answers questions that come up while hosting a party.",
    examples=[["What's the weather now in Bangalore?"]],
).launch()