asthaa30 committed
Commit 213ac08 · verified · 1 Parent(s): 1d21262

Update app.py

Files changed (1)
  1. app.py +14 -117
app.py CHANGED
@@ -78,7 +78,7 @@ def get_model_response(messages, inner_messages, message, system_message):
     messages_for_model.extend(inner_messages)

     try:
-        return client.chat.completions.create(
+        response = client.chat.completions.create(
             model=MODEL,
             messages=messages_for_model,
             tools=tools,
@@ -86,6 +86,7 @@ def get_model_response(messages, inner_messages, message, system_message):
             top_p=0.65,
             max_tokens=4096,
         )
+        return response.choices[0].message
     except Exception as e:
         print(f"An error occurred while getting model response: {str(e)}")
         print(messages_for_model)
@@ -93,7 +94,6 @@ def get_model_response(messages, inner_messages, message, system_message):

 def respond(message, history, system_message):
     inner_history = []
-
     available_functions = {
         "legal_tool_function": legal_tool_function,
     }
@@ -102,13 +102,14 @@ def respond(message, history, system_message):
     assistant_native_message_list = []

     while True:
-        response_message = (
-            get_model_response(history, inner_history, message, system_message)
-            .choices[0]
-            .message
-        )
+        response_message = get_model_response(history, inner_history, message, system_message)
+
+        if response_message is None:
+            return assistant_content, history

         if not response_message.tool_calls and response_message.content is not None:
+            assistant_content += response_message.content
+            assistant_native_message_list.append(response_message)
             break

         if response_message.tool_calls is not None:
@@ -132,7 +133,8 @@ def respond(message, history, system_message):
                 "metadata": {"native_messages": assistant_native_message_list},
             }

-            yield assistant_message
+            # Collect responses
+            response_list = [assistant_message]

             for tool_call in response_message.tool_calls:
                 function_response = call_function(tool_call, available_functions)
@@ -161,19 +163,10 @@ def respond(message, history, system_message):
                     "content": assistant_content,
                     "metadata": {"native_messages": assistant_native_message_list},
                 }
-                yield tool_message
+                response_list.append(tool_message)
                 inner_history.append(native_tool_message)

-    assistant_content += response_message.content
-    assistant_native_message_list.append(response_message)
-
-    final_message = {
-        "role": "assistant",
-        "content": assistant_content,
-        "metadata": {"native_messages": assistant_native_message_list},
-    }
-
-    yield final_message
+    return response_list, inner_history

 # Update the system prompt to be more relevant to maritime legal assistance
 system_prompt = "You are a maritime legal assistant with expertise in maritime law. Provide detailed legal advice and information based on maritime legal principles and regulations."
@@ -185,106 +178,10 @@ with gr.Blocks() as demo:
     message_input = gr.Textbox(label="Message")

     def process_message(message, history, system_message):
-        response_gen = respond(message, history, system_message)
-        return list(response_gen), history
+        responses, updated_history = respond(message, history, system_message)
+        return responses, updated_history

     message_input.submit(process_message, [message_input, chatbot, system_message_input], [chatbot, chatbot])

-if __name__ == "__main__":
-    demo.launch()
-
-assistant_content = ""
-assistant_native_message_list = []
-
-while True:
-    response_message = (
-        get_model_response(history, inner_history, message, system_message)
-        .choices[0]
-        .message
-    )
-
-    if not response_message.tool_calls and response_message.content is not None:
-        break
-
-    if response_message.tool_calls is not None:
-        assistant_native_message_list.append(response_message)
-        inner_history.append(response_message)
-
-        assistant_content += (
-            "```json\n"
-            + json.dumps(
-                [
-                    tool_call.model_dump()
-                    for tool_call in response_message.tool_calls
-                ],
-                indent=2,
-            )
-            + "\n```\n"
-        )
-        assistant_message = {
-            "role": "assistant",
-            "content": assistant_content,
-            "metadata": {"native_messages": assistant_native_message_list},
-        }
-
-        yield assistant_message
-
-        for tool_call in response_message.tool_calls:
-            function_response = call_function(tool_call, available_functions)
-            assistant_content += (
-                "```json\n"
-                + json.dumps(
-                    {
-                        "name": tool_call.function.name,
-                        "arguments": json.loads(tool_call.function.arguments),
-                        "response": json.loads(function_response["content"]),
-                    },
-                    indent=2,
-                )
-                + "\n```\n"
-            )
-            native_tool_message = {
-                "tool_call_id": tool_call.id,
-                "role": "tool",
-                "content": function_response["content"],
-            }
-            assistant_native_message_list.append(
-                native_tool_message
-            )
-            tool_message = {
-                "role": "assistant",
-                "content": assistant_content,
-                "metadata": {"native_messages": assistant_native_message_list},
-            }
-            yield tool_message
-            inner_history.append(native_tool_message)
-
-    assistant_content += response_message.content
-    assistant_native_message_list.append(response_message)
-
-    final_message = {
-        "role": "assistant",
-        "content": assistant_content,
-        "metadata": {"native_messages": assistant_native_message_list},
-    }
-
-    yield final_message
-
-# Update the system prompt to be more relevant to maritime legal assistance
-system_prompt = "You are a maritime legal assistant with expertise in maritime law. Provide detailed legal advice and information based on maritime legal principles and regulations."
-
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
-        gr.Textbox(
-            value=system_prompt,
-            label="System message",
-        ),
-    ],
-    type="messages",
-    title="Maritime Legal Assistant Chat",
-    description="This chatbot uses the fine-tuned maritime legal model to provide legal assistance and information related to maritime law.",
-)
-
 if __name__ == "__main__":
     demo.launch()
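
A note on the first two hunks: get_model_response used to return the raw completion object and let callers unwrap it; after this commit it unwraps response.choices[0].message itself, and because its except branch only prints, it implicitly returns None on failure. The sketch below illustrates that shape with a stubbed request standing in for client.chat.completions.create; the stub classes and the make_request parameter are illustrative only, not part of app.py.

# Sketch of the post-commit error-handling shape in get_model_response.
# make_request stands in for client.chat.completions.create; _Resp/_Choice/_Msg
# are dummy objects so the example runs without an API client.
def get_model_response_sketch(make_request):
    try:
        response = make_request()
        return response.choices[0].message  # unwrap here instead of in the caller
    except Exception as e:
        print(f"An error occurred while getting model response: {str(e)}")
        # Falling off the end returns None, which respond() now has to check for.

class _Msg:
    content = "ok"

class _Choice:
    message = _Msg()

class _Resp:
    choices = [_Choice()]

print(get_model_response_sketch(_Resp).content)   # -> "ok"
print(get_model_response_sketch(lambda: 1 / 0))   # prints the error, then None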
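
The larger change in respond is structural: the old version was a generator that yielded partially built assistant messages for streaming, while the new version accumulates a response_list and returns it together with inner_history for process_message to hand back to the gr.Blocks chatbot. Below is a minimal, self-contained sketch of that collect-then-return loop; fake_model_response and its SimpleNamespace messages are invented stand-ins for the real model call, and the message bookkeeping is simplified relative to app.py.

# Collect-then-return loop, simplified. Nothing here talks to a real model.
from types import SimpleNamespace

def fake_model_response(turn):
    # Turn 0 pretends the model asked for a tool; turn 1 answers in plain text.
    if turn == 0:
        return SimpleNamespace(content=None, tool_calls=["legal_tool_function"])
    return SimpleNamespace(content="Final answer.", tool_calls=None)

def respond_sketch():
    response_list, inner_history = [], []
    for turn in range(10):                   # bounded stand-in for `while True`
        msg = fake_model_response(turn)
        if msg is None:                      # mirrors the new None guard
            break
        if not msg.tool_calls and msg.content is not None:
            response_list.append({"role": "assistant", "content": msg.content})
            break
        # Tool-call branch: record intermediate messages instead of yielding them.
        response_list.append({"role": "assistant", "content": "assistant requested a tool call"})
        inner_history.append({"role": "tool", "content": "tool result goes here"})
    return response_list, inner_history

responses, updated_history = respond_sketch()  # same shape process_message unpacks
print(len(responses), len(updated_history))    # -> 2 1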