# ConvoAI / app.py
# dlaima's picture
# Update app.py
# a025ba2 verified
import gradio as gr
import os
import wikipedia
from langchain_community.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.agents import AgentExecutor, initialize_agent
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools import Tool
# Define tools
def create_your_own(query: str) -> str:
    """Placeholder custom tool: currently echoes the input string reversed.

    Fill this in with whatever behavior you want the agent to expose.
    """
    return "".join(reversed(query))
def get_current_temperature(query: str) -> str:
    """Stub weather tool: ignores *query* and returns a canned reading."""
    canned_report = "It's sunny and 75°F."
    return canned_report
def search_wikipedia(query: str) -> str:
    """Look up *query* on Wikipedia and return a two-sentence summary.

    A disambiguation page yields a short list of up to five candidate
    titles; a missing page yields a fixed not-found message.
    """
    try:
        return wikipedia.summary(query, sentences=2)
    except wikipedia.exceptions.DisambiguationError as e:
        candidates = ", ".join(e.options[:5])
        return f"Multiple results found: {candidates}"
    except wikipedia.exceptions.PageError:
        return "No relevant Wikipedia page found."
# (name, callable, description) triples describing the agent's tool belt.
_TOOL_SPECS = [
    ("Temperature", get_current_temperature, "Get current temperature"),
    ("Search Wikipedia", search_wikipedia, "Search Wikipedia"),
    ("Create Your Own", create_your_own, "Custom tool for processing input"),
]
tools = [Tool(name=n, func=f, description=d) for n, f, d in _TOOL_SPECS]
# Define chatbot class
class cbfs:
    """Conversational agent wrapper.

    Wires an OpenAI chat model, a conversation buffer memory, and the
    supplied tools into a LangChain agent, and exposes a single-turn
    ``convchain`` entry point for the UI.
    """

    def __init__(self, tools):
        """Build the model, memory, and agent executor.

        Args:
            tools: list of langchain ``Tool`` objects the agent may call.
        """
        self.model = ChatOpenAI(temperature=0, openai_api_key=os.getenv("OPENAI_API_KEY"))
        self.memory = ConversationBufferMemory(return_messages=True, memory_key="chat_history", ai_prefix="Assistant")
        # NOTE(review): this prompt is never passed to initialize_agent, so
        # the zero-shot agent runs on its own built-in prompt and the
        # chat_history placeholder here is never filled. Kept only so code
        # that reads ``self.prompt`` keeps working; consider removing or
        # actually wiring it into the agent.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant. Remember what the user tells you in the conversation."),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad")
        ])
        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent="zero-shot-react-description",
            verbose=True,
            memory=self.memory,
            # Recover from malformed LLM tool-call output instead of raising.
            handle_parsing_errors=True
        )

    def convchain(self, query):
        """Run one user turn through the agent and return its text reply.

        Returns a prompt-for-input message when *query* is empty/falsy, and
        an ``"Error: ..."`` string if the agent raises.
        """
        if not query:
            return "Please enter a query."
        try:
            result = self.chain.invoke({"input": query})
            response = result.get("output", "No response generated.")
            # Bug fix: the executor's attached memory already records each
            # input/output pair on invoke(), so the previous explicit
            # self.memory.save_context(...) here duplicated every turn in
            # the conversation history. Removed.
            print("Agent Execution Result:", response)  # Debugging output
            return response
        except Exception as e:
            print("Execution Error:", str(e))
            return f"Error: {str(e)}"
# Create chatbot instance shared by all UI requests.
cb = cbfs(tools)


def process_query(query):
    """Gradio callback: forward the user's text to the shared chatbot."""
    return cb.convchain(query)
# Define Gradio UI: one input box wired to one read-only output box.
with gr.Blocks() as demo:
    with gr.Row():
        user_box = gr.Textbox(placeholder="Enter text here…", label="User Input")
        reply_box = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)
    # Pressing Enter in the input box runs the agent and fills the output.
    user_box.submit(process_query, inputs=user_box, outputs=reply_box)

demo.launch(share=True)