wop committed
Commit 122d511 · verified · 1 Parent(s): 3eac704

Update app.py

Files changed (1)
  1. app.py +46 -40
app.py CHANGED
@@ -104,46 +104,50 @@ def run_conversation(user_prompt):
             },
         }
     ]
-    response = client.chat.completions.create(
-        model=model_option,
-        messages=messages,
-        tools=tools,
-        tool_choice="auto",
-        max_tokens=4096
-    )
-
-    response_message = response.choices[0].delta
-    tool_calls = response_message.tool_calls
-
-    if tool_calls:
-        available_functions = {
-            "time_date": get_tool_owner_info
-        }
-
-        messages.append(response_message)
-
-        for tool_call in tool_calls:
-            function_name = tool_call.function.name
-            function_to_call = available_functions[function_name]
-            function_args = json.loads(tool_call.function.arguments)
-            function_response = function_to_call(**function_args)
-            messages.append(
-                {
-                    "tool_call_id": tool_call.id,
-                    "role": "tool",
-                    "name": function_name,
-                    "content": function_response,
-                }
-            )
-
-        second_response = client.chat.completions.create(
-            model=model_option,
-            messages=messages
-        )
-
-        return second_response.choices[0].delta.content
-    else:
-        return response_message.content
+    try:
+        response = client.chat.completions.create(
+            model=model_option,
+            messages=messages,
+            tools=tools,
+            tool_choice="auto",
+            max_tokens=4096
+        )
+
+        response_message = response.choices[0].delta
+        tool_calls = response_message.tool_calls
+
+        if tool_calls:
+            available_functions = {
+                "time_date": get_tool_owner_info
+            }
+
+            messages.append(response_message)
+
+            for tool_call in tool_calls:
+                function_name = tool_call.function.name
+                function_to_call = available_functions[function_name]
+                function_args = json.loads(tool_call.function.arguments)
+                function_response = function_to_call(**function_args)
+                messages.append(
+                    {
+                        "tool_call_id": tool_call.id,
+                        "role": "tool",
+                        "name": function_name,
+                        "content": function_response,
+                    }
+                )
+
+            second_response = client.chat.completions.create(
+                model=model_option,
+                messages=messages
+            )
+
+            return second_response.choices[0].delta.content
+        else:
+            return response_message.content
+    except Exception as e:
+        st.error(e, icon="🚨")
+        return None
 
 def get_tool_owner_info():
     owner_info = {
@@ -157,6 +161,9 @@ if prompt := st.chat_input("Enter your prompt here..."):
     with st.chat_message("user", avatar="🕺"):
         st.markdown(prompt)
 
+    chat_responses_generator = None
+    full_response = None
+
     try:
         chat_completion = client.chat.completions.create(
             model=model_option,
@@ -168,9 +175,8 @@ if prompt := st.chat_input("Enter your prompt here..."):
             stream=True,
        )
 
-        with st.chat_message("assistant", avatar="🤖"):
-            chat_responses_generator = generate_chat_responses(chat_completion)
-            full_response = st.write_stream(chat_responses_generator)
+        chat_responses_generator = generate_chat_responses(chat_completion)
+        full_response = st.write_stream(chat_responses_generator)
     except Exception as e:
         st.error(e, icon="🚨")
 
@@ -178,8 +184,8 @@ if prompt := st.chat_input("Enter your prompt here..."):
         st.session_state.messages.append(
             {"role": "assistant", "content": full_response}
        )
-    else:
-        combined_response = "\n".join(str(item) for item in full_response)
+    elif chat_responses_generator:
+        combined_response = "\n".join(str(item) for item in chat_responses_generator)
         st.session_state.messages.append(
             {"role": "assistant", "content": combined_response}
        )
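
For reference, the first hunk's only change is to wrap the tool-calling round trip in a try/except. Below is a minimal, self-contained sketch of that flow, assuming an OpenAI-compatible client (for example groq.Groq or openai.OpenAI); the helper name run_tool_round_trip is illustrative and not part of app.py. One caveat worth flagging: non-streaming responses normally expose the assistant message at choices[0].message, whereas the committed code reads choices[0].delta.

# Sketch only: client, model_option, messages, tools and available_functions are
# assumed to have the same shapes as in app.py; the function name is hypothetical.
import json

def run_tool_round_trip(client, model_option, messages, tools, available_functions):
    response = client.chat.completions.create(
        model=model_option,
        messages=messages,
        tools=tools,
        tool_choice="auto",
        max_tokens=4096,
    )
    # Non-streaming responses usually carry the message at choices[0].message.
    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls

    if not tool_calls:
        return response_message.content

    # Echo the assistant's tool-call turn, then append one "tool" message per call.
    messages.append(response_message)
    for tool_call in tool_calls:
        function_to_call = available_functions[tool_call.function.name]
        function_args = json.loads(tool_call.function.arguments)
        messages.append(
            {
                "tool_call_id": tool_call.id,
                "role": "tool",
                "name": tool_call.function.name,
                "content": function_to_call(**function_args),  # expected to be a string
            }
        )

    # A second call lets the model phrase the tool output as a user-facing answer.
    second_response = client.chat.completions.create(
        model=model_option,
        messages=messages,
    )
    return second_response.choices[0].message.content

Called as run_tool_round_trip(client, model_option, messages, tools, {"time_date": get_tool_owner_info}), this mirrors the hunk above, with the commit's added error handling left to the caller.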
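
The remaining hunks pre-initialise chat_responses_generator and full_response, move the st.write_stream call directly under the try, and change the fallback branch to elif chat_responses_generator. A rough sketch of the underlying Streamlit streaming pattern follows; client, model_option, prompt and st.session_state.messages are assumed to exist as in app.py, and this generate_chat_responses is an assumed implementation, since the real one is not shown in this diff.

# Sketch only: st.write_stream with a Groq/OpenAI-style streaming completion;
# not a verbatim copy of app.py.
import streamlit as st

def generate_chat_responses(chat_completion):
    # Yield just the incremental text of each streamed chunk.
    for chunk in chat_completion:
        if chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content

full_response = None
try:
    chat_completion = client.chat.completions.create(
        model=model_option,
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )
    with st.chat_message("assistant", avatar="🤖"):
        # st.write_stream renders chunks as they arrive and returns the full text
        # as a str (or a list when non-string objects were streamed).
        full_response = st.write_stream(generate_chat_responses(chat_completion))
except Exception as e:
    st.error(e, icon="🚨")

if isinstance(full_response, str):
    st.session_state.messages.append({"role": "assistant", "content": full_response})
elif full_response:
    # Fallback when write_stream returned a list of streamed items.
    combined_response = "\n".join(str(item) for item in full_response)
    st.session_state.messages.append({"role": "assistant", "content": combined_response})

This sketch keeps the st.chat_message container around write_stream (which the commit removes) so the streamed answer stays inside an assistant bubble, and its fallback joins the list returned by write_stream rather than re-iterating the generator, since a generator is exhausted once streaming finishes.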