wop committed on
Commit 93d9be5 · verified · 1 Parent(s): 5d92c93

Update app.py

Files changed (1)
  1. app.py +12 -35
app.py CHANGED
@@ -71,19 +71,10 @@ for message in st.session_state.messages:
     with st.chat_message(message["role"], avatar=avatar):
         st.markdown(message["content"])
 
-def generate_chat_responses(chat_completion) -> Generator[str, None, None]:
-    """Yield chat response content from the Groq API response."""
-    for chunk in chat_completion:
-        if chunk.choices:
-            for choice in chunk.choices:
-                if choice.delta.content:
-                    yield choice.delta.content
-        if chunk.message.tool_calls:
-            for tool_call in chunk.message.tool_calls:
-                function_name = tool_call.function.name
-                if function_name == "time_date":
-                    owner_info = get_tool_owner_info()
-                    yield owner_info
+def generate_chat_responses(user_prompt):
+    """Fetches response from the Groq API using the run_conversation function."""
+    response = run_conversation(user_prompt)
+    yield response  # Yield the response content
 
 def run_conversation(user_prompt):
     messages=[
@@ -160,33 +151,19 @@ def get_tool_owner_info():
 if prompt := st.chat_input("Enter your prompt here..."):
     st.session_state.messages.append({"role": "user", "content": prompt})
 
-    with st.chat_message("user", avatar="🕺"):
+    with st.chat_message("user", avatar=""):
         st.markdown(prompt)
 
-    chat_responses_generator = None
-
     try:
-        chat_completion = client.chat.completions.create(
-            model=model_option,
-            messages=[
-                {"role": m["role"], "content": m["content"]}
-                for m in st.session_state.messages
-            ],
-            max_tokens=max_tokens,
-            stream=True,
-        )
-
-        chat_responses_generator = generate_chat_responses(chat_completion)
-        full_response = st.write_stream(chat_responses_generator)
+        # Use generate_chat_responses with user prompt
+        with st.chat_message("assistant", avatar=""):
+            chat_responses_generator = generate_chat_responses(prompt)
+            full_response = st.write_stream(chat_responses_generator)
     except Exception as e:
-        st.error(e, icon="🚨")
+        st.error(e, icon="")
 
+    # Append the full response to session_state.messages
     if isinstance(full_response, str):
         st.session_state.messages.append(
             {"role": "assistant", "content": full_response}
-        )
-    elif chat_responses_generator:
-        combined_response = "\n".join(str(item) for item in chat_responses_generator)
-        st.session_state.messages.append(
-            {"role": "assistant", "content": combined_response}
-        )
+        )
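
For reference, a minimal sketch of how the changed pieces fit together after this commit. It assumes run_conversation() returns a single string from the Groq API (its body is outside this diff, so a placeholder stands in for it here), and it initializes full_response before the try block, which the committed code does not do, so the name is always defined even if the request fails.

import streamlit as st

# Placeholder for the real helper in app.py; the actual function builds the
# message list and calls the Groq API. Here it only echoes, for illustration.
def run_conversation(user_prompt):
    return f"(placeholder reply to: {user_prompt})"

def generate_chat_responses(user_prompt):
    """Wrap run_conversation() in a generator so st.write_stream() can consume it."""
    response = run_conversation(user_prompt)
    yield response  # one chunk; st.write_stream() renders it and returns the text

if "messages" not in st.session_state:
    st.session_state.messages = []

if prompt := st.chat_input("Enter your prompt here..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    full_response = None  # sketch-only guard, not part of the commit
    try:
        with st.chat_message("assistant"):
            full_response = st.write_stream(generate_chat_responses(prompt))
    except Exception as e:
        st.error(e)

    if isinstance(full_response, str):
        st.session_state.messages.append(
            {"role": "assistant", "content": full_response}
        )

Because generate_chat_responses() now yields the complete run_conversation() result as a single item, st.write_stream() displays the reply in one piece rather than streaming it token by token as the removed client.chat.completions.create(stream=True) path did.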