import os

import gradio as gr
import openai

# Read the OpenAI API key from the environment
openai.api_key = os.getenv("OPENAI_API_KEY")
# LangChain imports: the chat model wrapper and the message types it expects
from langchain.chat_models import ChatOpenAI
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)
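
# A minimal startup guard (an added sketch, not in the original Space): warn
# loudly if OPENAI_API_KEY is missing so failures surface at launch rather than
# on the first chat request.
if not openai.api_key:
    print("Warning: OPENAI_API_KEY is not set; requests to OpenAI will fail.")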

# Optional system message for the chatbot; prepend it to `messages` in predict() to use it
# system = SystemMessage(content="You are a helpful AI assistant")

# Driver: gr.ChatInterface calls this with the latest user message and the
# chat history as a list of (user, assistant) tuples.
def predict(user_input, chatbot):
    print(f"chatbot - {chatbot}")
    print(f"user_input - {user_input}")

    chat = ChatOpenAI(
        temperature=1.0,
        streaming=True,
        model='gpt-3.5-turbo-0613')

    # Rebuild the LangChain message list from the history, then add the new user turn
    messages = []
    for human_text, ai_text in chatbot:
        messages.append(HumanMessage(content=human_text))
        messages.append(AIMessage(content=ai_text))
    messages.append(HumanMessage(content=user_input))
    print(f"messages list is - {messages}")

    # Get gpt-3.5's response
    gpt_response = chat(messages)
    print(f"gpt_response - {gpt_response}")
    bot_message = gpt_response.content
    print(f"bot_message - {bot_message}")

    # gr.ChatInterface appends the returned string to the displayed history itself
    return bot_message
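
# A usage sketch with hypothetical values: gr.ChatInterface passes the running
# history as (user, assistant) tuples, and predict() flattens it into
# alternating HumanMessage/AIMessage objects before the new turn, e.g.
#
#   history = [("Hi", "Hello! How can I help?")]
#   predict("Tell me a joke", history)
#   # -> messages == [HumanMessage(content="Hi"),
#   #                 AIMessage(content="Hello! How can I help?"),
#   #                 HumanMessage(content="Tell me a joke")]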

# Build the UI: ChatInterface wires predict() up to a Chatbot component
chatbot = gr.Chatbot()
gr.ChatInterface(
    predict,
    chatbot=chatbot,
    undo_btn="del",  # button label for removing the last exchange
    # examples=["How are you?", "What's up?"],
).launch(share=False, debug=True)
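
# Optional (an assumption, not in the original): to handle several concurrent
# users, Gradio's request queue can be enabled by chaining .queue() before
# .launch(), e.g.
#   gr.ChatInterface(predict, chatbot=chatbot).queue().launch()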