VanguardAI committed (verified)
Commit d70fa2a · 1 parent: 1061b7a

Update app.py

Files changed (1):
  app.py  +5 -15
app.py CHANGED
@@ -16,6 +16,7 @@ from tavily import TavilyClient
 import requests
 from huggingface_hub import hf_hub_download
 from safetensors.torch import load_file
+from llama_index.core.chat_engine.types import AgentChatResponse
 
 # Initialize models and clients
 MODEL = 'llama3-groq-70b-8192-tool-use-preview'
@@ -48,18 +49,9 @@ def play_voice_output(response):
 # NumPy Code Calculator Tool
 def numpy_code_calculator(query):
     try:
-        llm_response = client.chat.completions.create(
-            model=MODEL,
-            messages=[
-                {"role": "user", "content": f"Write NumPy code to: {query}"}
-            ]
-        )
-        code = llm_response.choices[0].message.content
-        print(f"Generated NumPy code:\n{code}")
-
-        # Execute the code in a safe environment
+        # Assume query is a request for a numpy computation
         local_dict = {"np": np}
-        exec(code, local_dict)
+        exec(query, local_dict)
         result = local_dict.get("result", "No result found")
         return str(result)
     except Exception as e:
@@ -82,8 +74,6 @@ def image_generation(query):
     return "output.jpg"
 
 # Function to handle different input types and choose the right tool
-from llama_index.core.chat_engine.types import AgentChatResponse
-
 def handle_input(user_prompt, image=None, audio=None, websearch=False):
     if audio:
         if isinstance(audio, str):
@@ -115,7 +105,7 @@ def handle_input(user_prompt, image=None, audio=None, websearch=False):
 
     # Extract the content from AgentChatResponse to return as a string
     if isinstance(response, AgentChatResponse):
-        response = response.final_response  # Use 'final_response' to access the text response
+        response = response.response
 
     return response
 
@@ -188,4 +178,4 @@ def main_interface(user_prompt, image=None, audio=None, voice_only=False, websea
 
 # Launch the UI
 demo = create_ui()
-demo.launch()
+demo.launch()
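
For reference, a minimal sketch of numpy_code_calculator as it reads after this commit: the LLM code-generation call is dropped and the incoming query string is executed directly against a namespace that exposes NumPy as np. The except body and the example call below are assumptions; the diff only shows the function down to the except Exception as e: line.

import numpy as np

def numpy_code_calculator(query):
    try:
        # Assume query is a request for a numpy computation:
        # the snippet is executed as-is and should assign to `result`.
        local_dict = {"np": np}
        exec(query, local_dict)
        result = local_dict.get("result", "No result found")
        return str(result)
    except Exception as e:
        return f"Error: {e}"  # assumed error handling; not shown in the diff

# Hypothetical call: the snippet must assign to `result` to be picked up.
print(numpy_code_calculator("result = np.arange(5).sum()"))  # -> "10"

Note that after this change the tool no longer asks the model to write NumPy code first; it trusts the caller-supplied snippet and runs it directly.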
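The other functional change is in handle_input: when the agent returns a llama_index AgentChatResponse, the text is now taken from its response attribute rather than final_response. A minimal sketch of that unwrapping step, where extract_text is a hypothetical helper name and the hand-built response object is used only for illustration:

from llama_index.core.chat_engine.types import AgentChatResponse

def extract_text(response):
    # After this commit the plain-text answer is read from `.response`.
    if isinstance(response, AgentChatResponse):
        return response.response
    return response

# Illustration with a hand-built response object (not how app.py builds it).
print(extract_text(AgentChatResponse(response="Hello from the agent")))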