:gem: [Feature] Enable OpenAI API call from fastapi with EventSourceResponse of sse_starlette
- apis/chat_api.py +25 -12
- examples/chat_with_openai.py +7 -13
- examples/chat_with_post.py +1 -1
- networks/message_outputer.py +16 -7
- networks/message_parser.py +3 -4
apis/chat_api.py
CHANGED
@@ -1,13 +1,25 @@
 import uvicorn
 
 from fastapi import FastAPI
-from fastapi.responses import StreamingResponse
 from pydantic import BaseModel, Field
 from conversations import (
     ConversationConnector,
     ConversationCreator,
     ConversationSession,
 )
+from networks import OpenaiStreamOutputer
+from sse_starlette.sse import EventSourceResponse
+
+
+def mock_stream_chat(prompt):
+    outputer = OpenaiStreamOutputer()
+    for i in range(10):
+        output = outputer.output(content=f"MSG {i} ", content_type="Completions")
+        print(output)
+        yield output
+    output = outputer.output(content="", content_type="Finished")
+    print(output)
+    yield output
 
 
 class ChatAPIApp:
@@ -131,14 +143,14 @@ class ChatAPIApp:
         description="(int) Invocation ID",
     )
 
-    def chat_completions(self, item: ChatCompletionsPostItem):
-        connector = ConversationConnector(
-            conversation_style=item.model,
-            sec_access_token=item.sec_access_token,
-            client_id=item.client_id,
-            conversation_id=item.conversation_id,
-            invocation_id=item.invocation_id,
-        )
+    def chat_completions(self, item: ChatCompletionsPostItem):
+        # connector = ConversationConnector(
+        #     conversation_style=item.model,
+        #     sec_access_token=item.sec_access_token,
+        #     client_id=item.client_id,
+        #     conversation_id=item.conversation_id,
+        #     invocation_id=item.invocation_id,
+        # )
 
         if item.invocation_id == 0:
             # TODO: History Messages Merger
@@ -146,9 +158,10 @@ class ChatAPIApp:
         else:
             prompt = item.messages[-1]["content"]
 
-        return StreamingResponse(
-            connector.stream_chat(prompt=prompt, yield_output=True),
-
+        return EventSourceResponse(
+            # connector.stream_chat(prompt=prompt, yield_output=True),
+            mock_stream_chat(prompt),
+            media_type="text/event-stream",
         )
 
     def setup_routes(self):
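For reference, the core pattern this commit adopts, as a self-contained sketch: sse_starlette's EventSourceResponse streams a plain generator from a FastAPI route, sending each yielded string as one SSE event. The /stream path and the number_stream helper below are illustrative, not taken from this repo.

import uvicorn
from fastapi import FastAPI
from sse_starlette.sse import EventSourceResponse

app = FastAPI()

def number_stream():
    # Each yielded string is framed as one `data: ...` SSE event.
    for i in range(5):
        yield f"MSG {i}"

@app.get("/stream")
def stream():
    return EventSourceResponse(number_stream(), media_type="text/event-stream")

if __name__ == "__main__":
    uvicorn.run(app, port=22222)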
examples/chat_with_openai.py
CHANGED
@@ -22,17 +22,11 @@ response = client.chat.completions.create(
     extra_body=extra_body,
 )
 
-print(response)
-
-for chunk in response:
-    # print(chunk.choices[0].delta)
-    print("??")
-
-# print(response.choices[0].message)
 # print(response)
-
-
-
-
-
-
+for chunk in response:
+    if chunk.choices[0].delta.content is not None:
+        print(chunk.choices[0].delta.content, end="", flush=True)
+    elif chunk.choices[0].finish_reason == "stop":
+        print()
+    else:
+        print(chunk)
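The loop above presumes a streaming client roughly like the following sketch. The base_url, api_key, and model values are placeholders (the real ones, plus the extra_body argument shown in the hunk context, live earlier in examples/chat_with_openai.py); stream=True is what makes the response iterable chunk by chunk.

from openai import OpenAI

client = OpenAI(base_url="http://localhost:22222", api_key="sk-xxxxx")
response = client.chat.completions.create(
    model="precise",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)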
examples/chat_with_post.py
CHANGED
@@ -3,7 +3,7 @@ import httpx
 import json
 import re
 
-
+# If running this service with a proxy, you might need to unset `http(s)_proxy`.
 chat_api = "http://localhost:22222"
 api_key = "sk-xxxxx"
 requests_headers = {}
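A minimal sketch of how a POST example like this can consume the SSE stream with httpx (the /chat/completions path and the payload shape are assumptions, not confirmed by this diff). Passing trust_env=False makes httpx ignore the http(s)_proxy environment variables, which is an alternative to unsetting them as the new comment suggests.

import json
import httpx

chat_api = "http://localhost:22222"

# trust_env=False: do not pick up proxy settings from the environment.
with httpx.stream(
    "POST",
    f"{chat_api}/chat/completions",  # assumed endpoint path
    json={
        "model": "precise",
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": True,
    },
    timeout=30,
    trust_env=False,
) as response:
    for line in response.iter_lines():
        if line.startswith("data: "):
            print(json.loads(line[len("data: "):]))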
networks/message_outputer.py
CHANGED
@@ -7,14 +7,21 @@ class OpenaiStreamOutputer:
     * https://platform.openai.com/docs/api-reference/chat/create
     """
 
-    def data_to_string(self, data, content_type=""):
+    def data_to_string(self, data={}, content_type="", media_type=None):
         # return (json.dumps(data) + "\n").encode("utf-8")
-        data_str = f"
-
-
+        data_str = f"{json.dumps(data)}\n"
+
+        if media_type == "text/event-stream":
+            data_str = f"data: {data_str}"
+
         return data_str
 
-    def output(
+    def output(
+        self,
+        content=None,
+        content_type=None,
+        media_type=None,
+    ) -> bytes:
         data = {
             "created": 1677825464,
             "id": "chatcmpl-bing",
@@ -37,6 +44,8 @@ class OpenaiStreamOutputer:
             "InternalSearchResult",
             "SuggestedResponses",
         ]:
+            if content_type in ["InternalSearchQuery", "InternalSearchResult"]:
+                content += "\n"
             data["choices"] = [
                 {
                     "index": 0,
@@ -56,8 +65,8 @@ class OpenaiStreamOutputer:
             data["choices"] = [
                 {
                     "index": 0,
-                    "delta": {
+                    "delta": {},
                     "finish_reason": None,
                 }
             ]
-        return self.data_to_string(data, content_type)
+        return self.data_to_string(data, content_type, media_type)
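In isolation, the new SSE framing in data_to_string behaves like this self-contained sketch (same logic, lifted outside the class):

import json

def data_to_string(data={}, content_type="", media_type=None):
    data_str = f"{json.dumps(data)}\n"
    # SSE clients expect each event payload to be prefixed with "data: ".
    if media_type == "text/event-stream":
        data_str = f"data: {data_str}"
    return data_str

print(data_to_string({"id": "chatcmpl-bing"}, media_type="text/event-stream"))
# data: {"id": "chatcmpl-bing"}

Note that data_to_string returns a str, so the -> bytes annotation added to output() is looser than what the method actually returns.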
networks/message_parser.py
CHANGED
@@ -50,7 +50,7 @@ class MessageParser:
         # Message: Search Query
         elif message_type in ["InternalSearchQuery"]:
             message_hidden_text = message["hiddenText"]
-            search_str = f"
+            search_str = f"[Searching: [{message_hidden_text}]]"
             logger.note(search_str)
             if return_output:
                 return self.outputer.output(
@@ -58,7 +58,7 @@ class MessageParser:
                 )
         # Message: Internal Search Results
         elif message_type in ["InternalSearchResult"]:
-            analysis_str = f"
+            analysis_str = f"[Analyzing search results ...]"
             logger.note(analysis_str)
             if return_output:
                 return self.outputer.output(
@@ -85,5 +85,4 @@ class MessageParser:
                 f"Not Supported Message Type: {message_type}"
             )
 
-
-        return b""
+        return self.outputer.output(content="", content_type="NotImplemented")
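This fallback change matters for the SSE path: an unrecognized message type previously yielded bare b"", which is not valid JSON for a chunk consumer, whereas it now goes through the outputer. A sketch of the assumed behavior, runnable inside this repo:

from networks import OpenaiStreamOutputer

# Assumption: "NotImplemented" is not a recognized content_type, so output()
# falls through to the empty-delta chunk built in its else branch above.
outputer = OpenaiStreamOutputer()
print(outputer.output(content="", content_type="NotImplemented"))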