from langchain_openai import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
import gradio as gr
import os

# The OpenAI API key is read from an environment variable (e.g. a Space secret) named 'key'.
key = os.getenv('key')
model = ChatOpenAI(model="gpt-4o-mini", api_key=key)
def predict(message, history):
    # Convert Gradio's role/content history dicts into LangChain message objects.
    history_langchain_format = []
    for msg in history:
        if msg['role'] == "user":
            history_langchain_format.append(HumanMessage(content=msg['content']))
        elif msg['role'] == "assistant":
            history_langchain_format.append(AIMessage(content=msg['content']))
    # Append the latest user message, call the model on the full conversation,
    # and return the reply text for Gradio to display.
    history_langchain_format.append(HumanMessage(content=message))
    gpt_response = model.invoke(history_langchain_format)
    return gpt_response.content
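
# With type="messages" below, Gradio passes `history` to predict as a list of
# role/content dicts. A hypothetical example of that structure:
#   [{"role": "user", "content": "Hi"},
#    {"role": "assistant", "content": "Hello! How can I help?"}]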
demo = gr.ChatInterface(
    predict,
    type="messages"
)

demo.launch()
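
# Note: a possible extension (not part of this Space) is streaming the reply as it is
# generated. Assuming LangChain's .stream() and Gradio's support for generator
# functions in ChatInterface, a sketch that could replace predict looks like:
#
#   def stream_response(message, history):
#       history_langchain_format = [HumanMessage(content=message)]  # plus prior turns, built as above
#       partial = ""
#       for chunk in model.stream(history_langchain_format):
#           partial += chunk.content
#           yield partial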