DrishtiSharma committed on
Commit
eb28792
·
verified ·
1 Parent(s): 528b1be

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -7
app.py CHANGED
@@ -63,35 +63,43 @@ layer_agent_config_rec = {
63
 
64
# Unified streaming function to handle async and sync responses
async def stream_or_async_response(messages: "Union[Iterable[ResponseChunk], AsyncIterable[ResponseChunk]]"):
    """
    Stream response chunks from either a synchronous or asynchronous source.

    Intermediate chunks are buffered per layer in ``layer_outputs``; when a
    final chunk arrives, the buffered layer outputs are rendered as Streamlit
    expanders and the buffer is cleared. Each chunk's ``delta`` text is
    yielded to the caller in arrival order.

    :param messages: Iterable or AsyncIterable of ResponseChunk dicts with
        keys ``response_type``, ``delta`` and ``metadata``.
    :raises TypeError: if ``messages`` is neither iterable nor async-iterable.
    """
    layer_outputs = {}

    # NOTE: this must be a plain coroutine (no `yield` inside) — a `yield`
    # here would make it an async *generator*, and `await process_message(...)`
    # below would then raise TypeError instead of producing the delta.
    async def process_message(message):
        """Buffer or render one chunk, then return its delta text."""
        if message['response_type'] == 'intermediate':
            # Group intermediate deltas by the layer that produced them.
            layer = message['metadata']['layer']
            layer_outputs.setdefault(layer, []).append(message['delta'])
        else:
            # Final chunk: render each buffered layer's agent outputs, then reset.
            for layer, outputs in layer_outputs.items():
                st.write(f"Layer {layer}")
                cols = st.columns(len(outputs))
                for i, output in enumerate(outputs):
                    with cols[i]:
                        st.expander(label=f"Agent {i + 1}", expanded=False).write(output)
            layer_outputs.clear()
        # Every chunk's delta is surfaced to the caller.
        return message['delta']

    # Only the *iteration* over `messages` differs between the sync and async
    # cases; `process_message` is a coroutine either way and must be awaited.
    if hasattr(messages, "__aiter__"):  # Asynchronous iterable
        async for message in messages:
            yield await process_message(message)
    elif hasattr(messages, "__iter__"):  # Synchronous iterable
        for message in messages:
            yield await process_message(message)
    else:
        raise TypeError("'messages' must be an Iterable or AsyncIterable.")
94
 
 
95
  # Set up the MOAgent
96
  def set_moa_agent(
97
  main_model: str = default_config['main_model'],
@@ -258,18 +266,21 @@ if query := st.chat_input("Ask a question"):
258
  messages = moa_agent.chat(query, output_format='json')
259
 
260
  try:
261
- # Process each response chunk
 
262
  async for response in stream_or_async_response(messages):
263
- message_placeholder.markdown(response)
 
264
 
265
  # Save the final response to session state
266
- st.session_state.messages.append({"role": "assistant", "content": response})
267
  except Exception as e:
268
  st.error(f"Error processing response: {e}")
269
 
270
  # Run the asynchronous handle_query function
271
  asyncio.run(handle_query())
272
 
 
273
  # Add acknowledgment at the bottom
274
  st.markdown("---")
275
  st.markdown("""
 
63
 
64
# Unified streaming function to handle async and sync responses
async def stream_or_async_response(messages: "Union[Iterable[ResponseChunk], AsyncIterable[ResponseChunk]]"):
    """
    Handles both synchronous and asynchronous message streams.

    Intermediate chunks are accumulated per layer in ``layer_outputs``; when a
    final chunk arrives, the buffered layer outputs are displayed as Streamlit
    expanders and the buffer is cleared. The ``delta`` text of every chunk is
    yielded so the caller can accumulate the streamed response.

    :param messages: Iterable or AsyncIterable of ResponseChunk dicts with
        keys ``response_type``, ``delta`` and ``metadata``.
    :raises TypeError: if ``messages`` is neither iterable nor async-iterable.
    """
    layer_outputs = {}

    async def process_message(message):
        """Buffer or render one chunk, then return its delta text."""
        # Store intermediate messages in `layer_outputs`
        if message['response_type'] == 'intermediate':
            layer = message['metadata']['layer']
            layer_outputs.setdefault(layer, []).append(message['delta'])
        else:
            # Final message: show each buffered layer's agent outputs, then reset.
            for layer, outputs in layer_outputs.items():
                st.write(f"Layer {layer}")
                cols = st.columns(len(outputs))
                for i, output in enumerate(outputs):
                    with cols[i]:
                        st.expander(label=f"Agent {i + 1}", expanded=False).write(output)
            layer_outputs.clear()
        # Always return the delta (never None) so downstream string
        # concatenation of yielded chunks cannot raise TypeError.
        return message['delta']

    # `process_message` is a coroutine regardless of whether `messages` is
    # sync or async, so BOTH branches must await it — yielding the bare
    # coroutine object would hand callers coroutines instead of text.
    if hasattr(messages, "__aiter__"):  # Asynchronous iterable
        async for message in messages:
            yield await process_message(message)
    elif hasattr(messages, "__iter__"):  # Synchronous iterable
        for message in messages:
            yield await process_message(message)
    else:
        raise TypeError("'messages' must be an Iterable or AsyncIterable.")
101
 
102
+
103
  # Set up the MOAgent
104
  def set_moa_agent(
105
  main_model: str = default_config['main_model'],
 
266
  messages = moa_agent.chat(query, output_format='json')
267
 
268
  try:
269
+ # Stream and display responses from `stream_or_async_response`
270
+ final_response = ""
271
  async for response in stream_or_async_response(messages):
272
+ final_response += response # Accumulate full response
273
+ message_placeholder.markdown(final_response)
274
 
275
  # Save the final response to session state
276
+ st.session_state.messages.append({"role": "assistant", "content": final_response})
277
  except Exception as e:
278
  st.error(f"Error processing response: {e}")
279
 
280
  # Run the asynchronous handle_query function
281
  asyncio.run(handle_query())
282
 
283
+
284
  # Add acknowledgment at the bottom
285
  st.markdown("---")
286
  st.markdown("""