Niansuh committed (verified)
Commit e26a971 · 1 Parent(s): b87572c

Update api/utils.py

Files changed (1):
  1. api/utils.py (+21 -4)
api/utils.py CHANGED
@@ -64,13 +64,22 @@ async def process_streaming_response(request: ChatRequest):
     agent_mode = AGENT_MODE.get(request.model, {})
     trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
 
+    # Log only necessary information
+    logger.info(
+        f"Received streaming request for model: '{request.model}', "
+        f"stream: {request.stream}, temperature: {request.temperature}, "
+        f"top_p: {request.top_p}, max_tokens: {request.max_tokens}, "
+        f"number_of_messages: {len(request.messages)}"
+    )
+    logger.info(f"Using agent mode: {agent_mode}, trending agent mode: {trending_agent_mode}")
+
     json_data = {
         "messages": [message_to_dict(msg) for msg in request.messages],
         "previewToken": None,
         "userId": None,
         "codeModelMode": True,
-        "agentMode": agent_mode, # Populate agentMode
-        "trendingAgentMode": trending_agent_mode, # Populate trendingAgentMode
+        "agentMode": agent_mode, # Use agentMode
+        "trendingAgentMode": trending_agent_mode, # Use trendingAgentMode
         "isMicMode": False,
         "userSystemPrompt": None,
         "maxTokens": request.max_tokens,
@@ -125,13 +134,21 @@ async def process_non_streaming_response(request: ChatRequest):
     agent_mode = AGENT_MODE.get(request.model, {})
     trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
 
+    # Log only necessary information
+    logger.info(
+        f"Received non-streaming request for model: '{request.model}', "
+        f"max_tokens: {request.max_tokens}, temperature: {request.temperature}, "
+        f"top_p: {request.top_p}, number_of_messages: {len(request.messages)}"
+    )
+    logger.info(f"Using agent mode: {agent_mode}, trending agent mode: {trending_agent_mode}")
+
     json_data = {
         "messages": [message_to_dict(msg) for msg in request.messages],
         "previewToken": None,
         "userId": None,
         "codeModelMode": True,
-        "agentMode": agent_mode, # Populate agentMode
-        "trendingAgentMode": trending_agent_mode, # Populate trendingAgentMode
+        "agentMode": agent_mode, # Use agentMode
+        "trendingAgentMode": trending_agent_mode, # Use trendingAgentMode
         "isMicMode": False,
         "userSystemPrompt": None,
         "maxTokens": request.max_tokens,