acecalisto3 committed
Commit 3ae40d2 · verified · 1 Parent(s): 6e48490

Update app.py

Files changed (1)
  1. app.py +45 -9
app.py CHANGED
@@ -13,9 +13,9 @@ import json
 now = datetime.now()
 date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")
 
-client = InferenceClient(
-    "mistralai/Mixtral-8x7B-Instruct-v0.1"
-)
+# Define the model globally (or pass it as an argument to main)
+model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+client = InferenceClient(model)
 
 # --- Set up logging ---
 logging.basicConfig(
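
For reference, a minimal runnable sketch of how the pinned client introduced above can be driven; only the model id and the client come from this hunk, while the prompt and generation parameters are illustrative assumptions rather than code from app.py:

from huggingface_hub import InferenceClient

model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
client = InferenceClient(model)  # client is now pinned to this model

def generate_reply(prompt, temperature=0.7, max_new_tokens=256):
    # Stream tokens from the Inference API and join them into a single string,
    # the same "join the generator output" pattern the later hunks rely on.
    parts = []
    for token in client.text_generation(
        prompt,
        stream=True,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
    ):
        parts.append(token)
    return "".join(parts)

With stream=False the same call simply returns the full string, which removes the need for the join step.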
@@ -275,8 +275,7 @@ def project_explorer(path):
     tree = get_file_tree(path)
     display_file_tree(tree)
 
-def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty):
-    # Your existing code here
+def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model):  # Add 'model' as an argument
 
     try:
         # Attempt to join the generator output
@@ -302,6 +301,41 @@ def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperatur
             elif isinstance(part, dict) and 'content' in part:
                 response_parts.append(part['content']),
 
+    response = ''.join(response_parts,
+    # Run the model and get the response (convert generator to string)
+    prompt=message,
+    history=history,
+    agent_name=agent_name,
+    sys_prompt=sys_prompt,
+    temperature=temperature,
+    max_new_tokens=max_new_tokens,
+    top_p=top_p,
+    repetition_penalty=repetition_penalty,
+    )
+    try:
+        # Attempt to join the generator output
+        response = ''.join(generate(
+            model=model,  # Now you can use 'model' here
+            messages=messages,
+            stream=True,
+            temperature=0.7,
+            max_tokens=1500
+        ))
+    except TypeError:
+        # If joining fails, collect the output in a list
+        response_parts = []
+        for part in generate(
+            model=model,  # Now you can use 'model' here
+            messages=messages,
+            stream=True,
+            temperature=0.7,
+            max_tokens=1500
+        ):
+            if isinstance(part, str):
+                response_parts.append(part)
+            elif isinstance(part, dict) and 'content' in part:
+                response_parts.append(part['content']),
+
     response = ''.join(response_parts,
     # Run the model and get the response (convert generator to string)
     prompt=message,
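
The lines added in this hunk call a generate(...) helper with a messages list, neither of which appears here. Purely for comparison, the same messages-based streaming pattern can be written directly against huggingface_hub's InferenceClient.chat_completion; everything below except the model id is an assumption, not code from app.py:

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Stream chat-completion chunks and collect their text deltas.
response_parts = []
for chunk in client.chat_completion(
    messages=messages,
    stream=True,
    temperature=0.7,
    max_tokens=1500,
):
    delta = chunk.choices[0].delta.content
    if delta:
        response_parts.append(delta)

response = "".join(response_parts)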
@@ -319,6 +353,7 @@ def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperatur
     return history
 
 def main():
+
     with gr.Blocks() as demo:
         gr.Markdown("## FragMixt")
         gr.Markdown("### Agents w/ Agents")
@@ -346,15 +381,16 @@ def main():
         explore_button = gr.Button(value="Explore")
         project_output = gr.Textbox(label="File Tree", lines=20)
 
-        # Chat App Logic Tab
+        # Chat App Logic Tab
         with gr.Tab("Chat App"):
             history = gr.State([])
             for example in examples:
-                gr.Button(value=example[0]).click(lambda: chat_app_logic(example[0], history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty), outputs=chatbot)
+                gr.Button(value=example[0]).click(lambda: chat_app_logic(example[0], history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model), outputs=chatbot)
 
             # Connect components to the chat app logic
-            submit_button.click(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty], outputs=chatbot)
-            message.submit(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty], outputs=chatbot)
+            submit_button.click(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model], outputs=chatbot)  # Pass 'model'
+            message.submit(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model], outputs=chatbot)  # Pass 'model'
+
 
             # Connect components to the project explorer
             explore_button.click(project_explorer, inputs=project_path, outputs=project_output)
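
One wiring note on the hunk above: Gradio event listeners expect components in their inputs= list, so a plain string such as the global model is more naturally bound into the callback with functools.partial (or read from the module-level global inside chat_app_logic). A minimal self-contained sketch of that binding, using placeholder component names rather than the app's full layout:

import functools

import gradio as gr

model = "mistralai/Mixtral-8x7B-Instruct-v0.1"

def respond(message, model):
    # Placeholder body; the real app would call the inference client here.
    return f"[{model}] {message}"

with gr.Blocks() as demo:
    message = gr.Textbox(label="Message")
    reply = gr.Textbox(label="Reply")
    send = gr.Button("Send")
    # Bind the constant model with partial; Gradio supplies only the textbox value.
    send.click(functools.partial(respond, model=model), inputs=message, outputs=reply)

demo.launch()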
 
13
  now = datetime.now()
14
  date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")
15
 
16
+ # Define the model globally (or pass it as an argument to main)
17
+ model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
18
+ client = InferenceClient(model)
19
 
20
  # --- Set up logging ---
21
  logging.basicConfig(
 
275
  tree = get_file_tree(path)
276
  display_file_tree(tree)
277
 
278
+ def chat_app_logic(message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model): # Add 'model' as an argument
 
279
 
280
  try:
281
  # Attempt to join the generator output
 
301
  elif isinstance(part, dict) and 'content' in part:
302
  response_parts.append(part['content']),
303
 
304
+ response = ''.join(response_parts,
305
+ # Run the model and get the response (convert generator to string)
306
+ prompt=message,
307
+ history=history,
308
+ agent_name=agent_name,
309
+ sys_prompt=sys_prompt,
310
+ temperature=temperature,
311
+ max_new_tokens=max_new_tokens,
312
+ top_p=top_p,
313
+ repetition_penalty=repetition_penalty,
314
+ )
315
+ try:
316
+ # Attempt to join the generator output
317
+ response = ''.join(generate(
318
+ model=model, # Now you can use 'model' here
319
+ messages=messages,
320
+ stream=True,
321
+ temperature=0.7,
322
+ max_tokens=1500
323
+ ))
324
+ except TypeError:
325
+ # If joining fails, collect the output in a list
326
+ response_parts = []
327
+ for part in generate(
328
+ model=model, # Now you can use 'model' here
329
+ messages=messages,
330
+ stream=True,
331
+ temperature=0.7,
332
+ max_tokens=1500
333
+ ):
334
+ if isinstance(part, str):
335
+ response_parts.append(part)
336
+ elif isinstance(part, dict) and 'content' in part:
337
+ response_parts.append(part['content']),
338
+
339
  response = ''.join(response_parts,
340
  # Run the model and get the response (convert generator to string)
341
  prompt=message,
 
353
  return history
354
 
355
  def main():
356
+
357
  with gr.Blocks() as demo:
358
  gr.Markdown("## FragMixt")
359
  gr.Markdown("### Agents w/ Agents")
 
381
  explore_button = gr.Button(value="Explore")
382
  project_output = gr.Textbox(label="File Tree", lines=20)
383
 
384
+ # Chat App Logic Tab
385
  with gr.Tab("Chat App"):
386
  history = gr.State([])
387
  for example in examples:
388
+ gr.Button(value=example[0]).click(lambda: chat_app_logic(example[0], history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model), outputs=chatbot)
389
 
390
  # Connect components to the chat app logic
391
+ submit_button.click(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model], outputs=chatbot) # Pass 'model'
392
+ message.submit(chat_app_logic, inputs=[message, history, purpose, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, model], outputs=chatbot) # Pass 'model'
393
+
394
 
395
  # Connect components to the project explorer
396
  explore_button.click(project_explorer, inputs=project_path, outputs=project_output)