# ConvoAI / app.py
import os
import gradio as gr
from langchain.agents import AgentType, initialize_agent
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools import Tool
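
# Note: these imports follow the classic LangChain layout; on newer releases
# ChatOpenAI lives in the langchain_openai package and initialize_agent is
# deprecated, so pin an older langchain version if running this file as-is.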

# Custom tool: replace the body with whatever processing you need.
def create_your_own(query: str) -> str:
    """Placeholder tool; for now it simply returns the input string reversed."""
    return query[::-1]


# Example placeholder tools (stubbed responses for demonstration)
def get_current_temperature(query: str) -> str:
    """Stub: always reports the same weather."""
    return "It's sunny and 75°F."


def search_wikipedia(query: str) -> str:
    """Stub: echoes the query instead of performing a real search."""
    return "Wikipedia search results for: " + query


# Register the tools the agent is allowed to call
tools = [
    Tool(name="Temperature", func=get_current_temperature, description="Get current temperature"),
    Tool(name="Search Wikipedia", func=search_wikipedia, description="Search Wikipedia"),
    Tool(name="Create Your Own", func=create_your_own, description="Custom tool for processing input"),
]
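
# To expose another capability to the agent, append another Tool entry here,
# e.g. (hypothetical example):
#   tools.append(Tool(name="Reverse", func=lambda s: s[::-1], description="Reverse the input string"))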


# Agent wrapper class (name kept as `cbfs` to match the original course code)
class cbfs:
    def __init__(self, tools):
        # ChatOpenAI reads the API key from OPENAI_API_KEY; it is passed explicitly here.
        self.model = ChatOpenAI(temperature=0, openai_api_key=os.getenv("OPENAI_API_KEY"))
        self.memory = ConversationBufferMemory(return_messages=True, memory_key="chat_history")
        # Kept for reference: initialize_agent builds its own prompt for this agent
        # type, so this template is not actually passed to the agent below.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant"),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ])
        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent=AgentType.CHAT_CONVERSATIONAL_REACT_DESCRIPTION,
            memory=self.memory,
            handle_parsing_errors=True,  # recover gracefully from malformed LLM output
            verbose=True,
        )

    def convchain(self, query):
        if not query:
            return "Please enter a query."
        result = self.chain.invoke({"input": query})
        # The conversational agent returns its answer under the "output" key.
        return result.get("output", "No response generated.")
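
# Direct (non-Gradio) usage would look roughly like this, assuming OPENAI_API_KEY
# is set in the environment:
#   answer = cbfs(tools).convchain("What is the temperature right now?")
#   print(answer)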

# Instantiate the agent wrapper
cb = cbfs(tools)


# Thin wrapper used as the Gradio callback
def process_query(query):
    return cb.convchain(query)

# Set up the Gradio interface
with gr.Blocks() as demo:
    with gr.Row():
        inp = gr.Textbox(placeholder="Enter text here…", label="User Input")
        output = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)
    inp.submit(process_query, inputs=inp, outputs=output)

demo.launch(share=True)
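
# To run locally:
#   export OPENAI_API_KEY=...   # the agent needs a valid OpenAI key
#   python app.py
# share=True requests a public Gradio link; it is not needed when hosted on Spaces.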