Update app.py
app.py CHANGED
@@ -29,6 +29,7 @@ SAMPLE_RATE = 24000
 class OpenAIHandler(AsyncStreamHandler):
     def __init__(
         self,
+        system_prompt="You are a helpful assistant. Be concise and clear in your responses."
     ) -> None:
         super().__init__(
             expected_layout="mono",
@@ -38,9 +39,10 @@ class OpenAIHandler(AsyncStreamHandler):
         )
         self.connection = None
         self.output_queue = asyncio.Queue()
+        self.system_prompt = system_prompt

     def copy(self):
-        return OpenAIHandler()
+        return OpenAIHandler(system_prompt=self.system_prompt)

     async def start_up(
         self,
@@ -51,7 +53,10 @@ class OpenAIHandler(AsyncStreamHandler):
             model="gpt-4o-mini-realtime-preview-2024-12-17"
         ) as conn:
             await conn.session.update(
-                session={"turn_detection": {"type": "server_vad"}}
+                session={
+                    "turn_detection": {"type": "server_vad"},
+                    "system_prompt": self.system_prompt
+                }
             )
             self.connection = conn
             async for event in self.connection:
@@ -92,7 +97,7 @@ def update_chatbot(chatbot: list[dict], response: ResponseAudioTranscriptDoneEvent
 chatbot = gr.Chatbot(type="messages")
 latest_message = gr.Textbox(type="text", visible=False)
 stream = Stream(
-    OpenAIHandler(),
+    OpenAIHandler(system_prompt="당신은 친절한 한국어 AI 비서입니다. 모든 질문에 한국어로 간결하고 명확하게, 항상 존댓말로 답변하세요."),
     mode="send-receive",
     modality="audio",
     additional_inputs=[chatbot],
@@ -138,4 +143,4 @@ if __name__ == "__main__":
 else:
     import uvicorn

-    uvicorn.run(app, host="0.0.0.0", port=7860)
+    uvicorn.run(app, host="0.0.0.0", port=7860)
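The Korean prompt now passed to Stream roughly translates to: "You are a friendly Korean AI assistant. Answer every question in Korean, concisely and clearly, and always use polite speech."

Below is a minimal sketch, not part of the commit, of how the new system_prompt parameter propagates, assuming the handler behaves exactly as shown in the diff; handler and clone are illustrative names:

    # Each handler instance carries its own prompt.
    handler = OpenAIHandler(
        system_prompt="You are a helpful assistant. Be concise and clear in your responses."
    )

    # copy() now forwards the prompt, so clones created via copy()
    # keep the same instructions as the original handler.
    clone = handler.copy()
    assert clone.system_prompt == handler.system_prompt

    # During start_up, the prompt is sent alongside server-side voice activity
    # detection ("turn_detection": {"type": "server_vad"}) in the session.update payload.

One point worth verifying: the OpenAI Realtime session configuration documents the system-style field as "instructions", so it may be worth confirming that a "system_prompt" key in session.update is actually honored.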