Create app.py
app.py (added):
import os

from langchain_openai import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
import gradio as gr

# The original snippet passed an undefined `key`; here the key is assumed to be
# provided via the OPENAI_API_KEY environment variable (e.g. a Space secret).
model = ChatOpenAI(model="gpt-4o-mini", api_key=os.environ["OPENAI_API_KEY"])


def predict(message, history):
    # Convert Gradio's messages-format history into LangChain message objects.
    history_langchain_format = []
    for msg in history:
        if msg["role"] == "user":
            history_langchain_format.append(HumanMessage(content=msg["content"]))
        elif msg["role"] == "assistant":
            history_langchain_format.append(AIMessage(content=msg["content"]))
    # Append the new user message, then query the model and return its text.
    history_langchain_format.append(HumanMessage(content=message))
    gpt_response = model.invoke(history_langchain_format)
    return gpt_response.content


demo = gr.ChatInterface(
    predict,
    type="messages",
)

demo.launch()
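If this file is deployed as a Hugging Face Space, its dependencies would also need to be declared. A minimal requirements.txt sketch, with package names inferred from the imports above and left unpinned:

gradio
langchain
langchain-openai

The OpenAI key would then be supplied as a Space secret (or exported locally) named OPENAI_API_KEY so the os.environ lookup in app.py can find it.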