import os
import gradio as gr
from langchain.agents import initialize_agent
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.tools import Tool
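# Note: these import paths follow the classic LangChain package layout; newer
# releases (>= 0.1) move ChatOpenAI into the separate langchain_openai package.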

# Define a custom tool (placeholder implementation)

def create_your_own(query: str) -> str:
    """Placeholder tool: swap in your own logic here. For now it simply reverses the input string."""
    return query[::-1]

# Define other tools (example placeholders for context)
def get_current_temperature(query: str) -> str:
    return "It's sunny and 75°F."

def search_wikipedia(query: str) -> str:
    return "Wikipedia search results for: " + query

# Add the new tool to the list of available tools
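# Each Tool wraps a callable with a name and a description; the agent relies on
# the description to decide when to invoke the tool.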
tools = [
    Tool(name="Temperature", func=get_current_temperature, description="Get current temperature"),
    Tool(name="Search Wikipedia", func=search_wikipedia, description="Search Wikipedia"),
    Tool(name="Create Your Own", func=create_your_own, description="Custom tool for processing input")
]

# Define the cbfs class for handling the agent
class cbfs:
    
    def __init__(self, tools):
        self.model = ChatOpenAI(temperature=0, openai_api_key=os.getenv("OPENAI_API_KEY"))
        self.memory = ConversationBufferMemory(return_messages=True, memory_key="chat_history")
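        # Note: this prompt template is kept for reference only; initialize_agent
        # below constructs its own prompt for the chosen agent type.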
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful but sassy assistant"),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad")
        ])
        
        # initialize_agent returns an AgentExecutor that pairs the model with the
        # tools; the conversational agent type reads and writes the "chat_history"
        # key of the memory configured above.
        self.chain = initialize_agent(
            tools=tools,
            llm=self.model,
            agent="chat-conversational-react-description",
            memory=self.memory,
            verbose=True
        )
    
    def convchain(self, query):
        if not query:
            return "Please enter a query."
        result = self.chain.invoke({"input": query})
        return result.get('output', "No response generated.")

# Create an instance of the agent
cb = cbfs(tools)

# Create the Gradio interface
def process_query(query):
    return cb.convchain(query)

# Set up the Gradio interface
with gr.Blocks() as demo:
    with gr.Row():
        inp = gr.Textbox(placeholder="Enter text here…", label="User Input")
        output = gr.Textbox(placeholder="Response...", label="ChatBot Output", interactive=False)
    
    inp.submit(process_query, inputs=inp, outputs=output)

demo.launch(share=True)
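
# To exercise the agent without the web UI, you could call it directly, e.g.:
#   print(cb.convchain("What is 2 + 2?"))
# (example query; requires a valid OPENAI_API_KEY in the environment)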