DrishtiSharma committed on
Commit
8e15048
·
verified ·
1 Parent(s): 91e5253

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -57
app.py CHANGED
@@ -1,13 +1,11 @@
1
- #ref: https://github.com/kram254/Mixture-of-Agents-running-on-Groq/tree/main
2
  import streamlit as st
3
  import json
4
- import threading
5
- from typing import Iterable
6
  from moa.agent import MOAgent
7
  from moa.agent.moa import ResponseChunk
8
  from streamlit_ace import st_ace
9
- import asyncio
10
- from typing import Union, Iterable, AsyncIterable
11
  import copy
12
 
13
  # Default configuration
@@ -31,11 +29,9 @@ layer_agent_config_def = {
31
  "system_prompt": "You are an expert at logic and reasoning. Always take a logical approach to the answer. {helper_response}",
32
  "model_name": "llama3-8b-8192"
33
  },
34
-
35
  }
36
 
37
- # Recommended Configuration
38
-
39
  rec_config = {
40
  "main_model": "llama3-70b-8192",
41
  "cycles": 2,
@@ -65,8 +61,8 @@ layer_agent_config_rec = {
65
  },
66
  }
67
 
68
-
69
- async def async_stream_response(messages: Union[Iterable[ResponseChunk], AsyncIterable[ResponseChunk]]):
70
  layer_outputs = {}
71
 
72
  async def process_message(message):
@@ -82,7 +78,7 @@ async def async_stream_response(messages: Union[Iterable[ResponseChunk], AsyncIt
82
  for i, output in enumerate(outputs):
83
  with cols[i]:
84
  st.expander(label=f"Agent {i+1}", expanded=False).write(output)
85
-
86
  layer_outputs.clear()
87
  yield message['delta']
88
 
@@ -95,6 +91,7 @@ async def async_stream_response(messages: Union[Iterable[ResponseChunk], AsyncIt
95
  for message in messages:
96
  await process_message(message)
97
 
 
98
  def set_moa_agent(
99
  main_model: str = default_config['main_model'],
100
  cycles: int = default_config['cycles'],
@@ -104,26 +101,18 @@ def set_moa_agent(
104
  ):
105
  if override or ("main_model" not in st.session_state):
106
  st.session_state.main_model = main_model
107
- else:
108
- if "main_model" not in st.session_state: st.session_state.main_model = main_model
109
 
110
  if override or ("cycles" not in st.session_state):
111
  st.session_state.cycles = cycles
112
- else:
113
- if "cycles" not in st.session_state: st.session_state.cycles = cycles
114
 
115
  if override or ("layer_agent_config" not in st.session_state):
116
  st.session_state.layer_agent_config = layer_agent_config
117
- else:
118
- if "layer_agent_config" not in st.session_state: st.session_state.layer_agent_config = layer_agent_config
119
 
120
  if override or ("main_temp" not in st.session_state):
121
  st.session_state.main_temp = main_model_temperature
122
- else:
123
- if "main_temp" not in st.session_state: st.session_state.main_temp = main_model_temperature
124
 
125
  cls_ly_conf = copy.deepcopy(st.session_state.layer_agent_config)
126
-
127
  if override or ("moa_agent" not in st.session_state):
128
  st.session_state.moa_agent = MOAgent.from_config(
129
  main_model=st.session_state.main_model,
@@ -135,10 +124,11 @@ def set_moa_agent(
135
  del cls_ly_conf
136
  del layer_agent_config
137
 
 
138
  st.set_page_config(
139
  page_title="Karios Agents Powered by Groq",
140
  page_icon='static/favicon.ico',
141
- menu_items={
142
  'About': "## Groq Mixture-Of-Agents \n Powered by [Groq](https://groq.com)"
143
  },
144
  layout="wide"
@@ -154,8 +144,6 @@ valid_model_names = [
154
  st.markdown("<a href='https://groq.com'><img src='app/static/banner.png' width='500'></a>", unsafe_allow_html=True)
155
  st.write("---")
156
 
157
-
158
-
159
  # Initialize session state
160
  if "messages" not in st.session_state:
161
  st.session_state.messages = []
@@ -164,7 +152,6 @@ set_moa_agent()
164
 
165
  # Sidebar for configuration
166
  with st.sidebar:
167
- # config_form = st.form("Agent Configuration", border=False)
168
  st.title("MOA Configuration")
169
  with st.form("Agent Configuration", border=False):
170
  if st.form_submit_button("Use Recommended Config"):
@@ -177,10 +164,9 @@ with st.sidebar:
177
  )
178
  st.session_state.messages = []
179
  st.success("Configuration updated successfully!")
180
- except json.JSONDecodeError:
181
- st.error("Invalid JSON in Layer Agent Configuration. Please check your input.")
182
  except Exception as e:
183
  st.error(f"Error updating configuration: {str(e)}")
 
184
  # Main model selection
185
  new_main_model = st.selectbox(
186
  "Select Main Model",
@@ -206,8 +192,6 @@ with st.sidebar:
206
  )
207
 
208
  # Layer agent configuration
209
- tooltip = "Agents in the layer agent configuration run in parallel _per cycle_. Each layer agent supports all initialization parameters of [Langchain's ChatGroq](https://api.python.langchain.com/en/latest/chat_models/langchain_groq.chat_models.ChatGroq.html) class as valid dictionary fields."
210
- st.markdown("Layer Agent Config", help=tooltip)
211
  new_layer_agent_config = st_ace(
212
  value=json.dumps(st.session_state.layer_agent_config, indent=2),
213
  language='json',
@@ -229,31 +213,20 @@ with st.sidebar:
229
  )
230
  st.session_state.messages = []
231
  st.success("Configuration updated successfully!")
232
- except json.JSONDecodeError:
233
- st.error("Invalid JSON in Layer Agent Configuration. Please check your input.")
234
  except Exception as e:
235
  st.error(f"Error updating configuration: {str(e)}")
236
 
237
- st.markdown("---")
238
- st.markdown("""
239
- ### Credits
240
- - MOA: [Together AI](https://www.together.ai/blog/together-moa)
241
- - LLMs: [Groq](https://groq.com/)
242
- - Paper: [arXiv:2406.04692](https://arxiv.org/abs/2406.04692)
243
- """)
244
-
245
  # Main app layout
246
- st.header("Karios Agents", anchor=False)
247
- st.write("A this project oversees implementation of Mixture of Agents architecture Powered by Groq LLMs.")
248
- # st.image("./static/moa_groq.svg", caption="Mixture of Agents Workflow", width=1000)
249
 
250
  # Display current configuration
251
  with st.expander("Current MOA Configuration", expanded=False):
252
- st.markdown(f"**Main Model**: ``{st.session_state.main_model}``")
253
- st.markdown(f"**Main Model Temperature**: ``{st.session_state.main_temp:.1f}``")
254
- st.markdown(f"**Layers**: ``{st.session_state.cycles}``")
255
- st.markdown(f"**Layer Agents Config**:")
256
- new_layer_agent_config = st_ace(
257
  value=json.dumps(st.session_state.layer_agent_config, indent=2),
258
  language='json',
259
  placeholder="Layer Agent Configuration (JSON)",
@@ -269,14 +242,19 @@ for message in st.session_state.messages:
269
  st.markdown(message["content"])
270
 
271
  if query := st.chat_input("Ask a question"):
272
- st.session_state.messages.append({"role": "user", "content": query})
273
- with st.chat_message("user"):
274
- st.write(query)
275
-
276
- moa_agent: MOAgent = st.session_state.moa_agent
277
- with st.chat_message("assistant"):
278
- message_placeholder = st.empty()
279
- ast_mess = stream_response(moa_agent.chat(query, output_format='json'))
280
- response = st.write_stream(ast_mess)
281
-
282
- st.session_state.messages.append({"role": "assistant", "content": response})
 
 
 
 
 
 
1
+ # ref: https://github.com/kram254/Mixture-of-Agents-running-on-Groq/tree/main
2
  import streamlit as st
3
  import json
4
+ import asyncio
5
+ from typing import Union, Iterable, AsyncIterable
6
  from moa.agent import MOAgent
7
  from moa.agent.moa import ResponseChunk
8
  from streamlit_ace import st_ace
 
 
9
  import copy
10
 
11
  # Default configuration
 
29
  "system_prompt": "You are an expert at logic and reasoning. Always take a logical approach to the answer. {helper_response}",
30
  "model_name": "llama3-8b-8192"
31
  },
 
32
  }
33
 
34
+ # Recommended configuration
 
35
  rec_config = {
36
  "main_model": "llama3-70b-8192",
37
  "cycles": 2,
 
61
  },
62
  }
63
 
64
+ # Unified streaming function to handle async and sync responses
65
+ async def stream_or_async_response(messages: Union[Iterable[ResponseChunk], AsyncIterable[ResponseChunk]]):
66
  layer_outputs = {}
67
 
68
  async def process_message(message):
 
78
  for i, output in enumerate(outputs):
79
  with cols[i]:
80
  st.expander(label=f"Agent {i+1}", expanded=False).write(output)
81
+
82
  layer_outputs.clear()
83
  yield message['delta']
84
 
 
91
  for message in messages:
92
  await process_message(message)
93
 
94
+ # Set up the MOAgent
95
  def set_moa_agent(
96
  main_model: str = default_config['main_model'],
97
  cycles: int = default_config['cycles'],
 
101
  ):
102
  if override or ("main_model" not in st.session_state):
103
  st.session_state.main_model = main_model
 
 
104
 
105
  if override or ("cycles" not in st.session_state):
106
  st.session_state.cycles = cycles
 
 
107
 
108
  if override or ("layer_agent_config" not in st.session_state):
109
  st.session_state.layer_agent_config = layer_agent_config
 
 
110
 
111
  if override or ("main_temp" not in st.session_state):
112
  st.session_state.main_temp = main_model_temperature
 
 
113
 
114
  cls_ly_conf = copy.deepcopy(st.session_state.layer_agent_config)
115
+
116
  if override or ("moa_agent" not in st.session_state):
117
  st.session_state.moa_agent = MOAgent.from_config(
118
  main_model=st.session_state.main_model,
 
124
  del cls_ly_conf
125
  del layer_agent_config
126
 
127
+ # Streamlit app layout
128
  st.set_page_config(
129
  page_title="Karios Agents Powered by Groq",
130
  page_icon='static/favicon.ico',
131
+ menu_items={
132
  'About': "## Groq Mixture-Of-Agents \n Powered by [Groq](https://groq.com)"
133
  },
134
  layout="wide"
 
144
  st.markdown("<a href='https://groq.com'><img src='app/static/banner.png' width='500'></a>", unsafe_allow_html=True)
145
  st.write("---")
146
 
 
 
147
  # Initialize session state
148
  if "messages" not in st.session_state:
149
  st.session_state.messages = []
 
152
 
153
  # Sidebar for configuration
154
  with st.sidebar:
 
155
  st.title("MOA Configuration")
156
  with st.form("Agent Configuration", border=False):
157
  if st.form_submit_button("Use Recommended Config"):
 
164
  )
165
  st.session_state.messages = []
166
  st.success("Configuration updated successfully!")
 
 
167
  except Exception as e:
168
  st.error(f"Error updating configuration: {str(e)}")
169
+
170
  # Main model selection
171
  new_main_model = st.selectbox(
172
  "Select Main Model",
 
192
  )
193
 
194
  # Layer agent configuration
 
 
195
  new_layer_agent_config = st_ace(
196
  value=json.dumps(st.session_state.layer_agent_config, indent=2),
197
  language='json',
 
213
  )
214
  st.session_state.messages = []
215
  st.success("Configuration updated successfully!")
 
 
216
  except Exception as e:
217
  st.error(f"Error updating configuration: {str(e)}")
218
 
 
 
 
 
 
 
 
 
219
  # Main app layout
220
+ st.header("Karios Agents")
221
+ st.write("This project oversees implementation of Mixture of Agents architecture powered by Groq LLMs.")
 
222
 
223
  # Display current configuration
224
  with st.expander("Current MOA Configuration", expanded=False):
225
+ st.markdown(f"**Main Model**: `{st.session_state.main_model}`")
226
+ st.markdown(f"**Main Model Temperature**: `{st.session_state.main_temp:.1f}`")
227
+ st.markdown(f"**Layers**: `{st.session_state.cycles}`")
228
+ st.markdown("**Layer Agents Config:**")
229
+ st_ace(
230
  value=json.dumps(st.session_state.layer_agent_config, indent=2),
231
  language='json',
232
  placeholder="Layer Agent Configuration (JSON)",
 
242
  st.markdown(message["content"])
243
 
244
  if query := st.chat_input("Ask a question"):
245
+ async def handle_query():
246
+ st.session_state.messages.append({"role": "user", "content": query})
247
+ with st.chat_message("user"):
248
+ st.write(query)
249
+
250
+ moa_agent: MOAgent = st.session_state.moa_agent
251
+
252
+ with st.chat_message("assistant"):
253
+ message_placeholder = st.empty()
254
+ messages = moa_agent.chat(query, output_format='json')
255
+ async for response in stream_or_async_response(messages):
256
+ message_placeholder.markdown(response)
257
+
258
+ st.session_state.messages.append({"role": "assistant", "content": response})
259
+
260
+ asyncio.run(handle_query())