Update api/utils.py
api/utils.py  CHANGED  (+17 -2)
@@ -80,7 +80,15 @@ def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
         return content[len(model_prefix):].strip()
     return content
 
-#
+# Helper function to remove message between special tags
+def remove_message_between_special_tags(content: str) -> str:
+    """Remove any message that starts with '$~~~$' and ends with '$~~~$'."""
+    if content.startswith('$~~~$') and content.endswith('$~~~$'):
+        # Remove the entire message between the tags
+        return ""
+    return content
+
+# Modify the streaming response processing
 async def process_streaming_response(request: ChatRequest):
     # Generate a unique ID for this request
     request_id = f"chatcmpl-{uuid.uuid4()}"
@@ -161,6 +169,10 @@ async def process_streaming_response(request: ChatRequest):
                         content = content.replace(BLOCKED_MESSAGE, '').strip()
                         if not content:
                             continue  # Skip if content is empty after removal
+
+                        # Remove content between special tags
+                        content = remove_message_between_special_tags(content)
+
                         cleaned_content = strip_model_prefix(content, model_prefix)
                         yield f"data: {json.dumps(create_chat_completion_data(cleaned_content, request.model, timestamp))}\n\n"
 
@@ -175,7 +187,7 @@ async def process_streaming_response(request: ChatRequest):
         )
         raise HTTPException(status_code=500, detail=str(e))
 
-#
+# Modify the non-streaming response processing
 async def process_non_streaming_response(request: ChatRequest):
     # Generate a unique ID for this request
     request_id = f"chatcmpl-{uuid.uuid4()}"
@@ -272,6 +284,9 @@ async def process_non_streaming_response(request: ChatRequest):
                 status_code=500, detail="Blocked message detected in response."
            )
 
+        # Remove content between special tags
+        full_response = remove_message_between_special_tags(full_response)
+
        cleaned_full_response = strip_model_prefix(full_response, model_prefix)
 
        return {
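
For a quick sanity check of the new helper in isolation, the sketch below copies remove_message_between_special_tags from the diff and exercises it on a few made-up chunks (the sample strings are illustrative only; the '$~~~$' sentinel is the one used in the diff):

# Standalone sketch of the helper added above; the sample chunks are invented
# for illustration and are not part of api/utils.py.

def remove_message_between_special_tags(content: str) -> str:
    """Remove any message that starts with '$~~~$' and ends with '$~~~$'."""
    if content.startswith('$~~~$') and content.endswith('$~~~$'):
        # The whole chunk is wrapped in the sentinel tags, so drop it entirely.
        return ""
    return content


if __name__ == "__main__":
    # A chunk that is nothing but a tagged service message is removed.
    assert remove_message_between_special_tags("$~~~$Service notice$~~~$") == ""
    # Ordinary model output passes through unchanged.
    assert remove_message_between_special_tags("Hello, world!") == "Hello, world!"
    # Tags appearing only in the middle of a chunk are left alone, because the
    # check uses startswith/endswith on the whole string.
    assert remove_message_between_special_tags("Hi $~~~$note$~~~$ there") == "Hi $~~~$note$~~~$ there"

Note that the helper only drops a chunk when the sentinel wraps the entire string, so in process_streaming_response a tagged message split across two streamed chunks would still pass through; the non-streaming path applies the check to the assembled full_response, so it is not affected by chunk boundaries.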