mobinln committed on
Commit 034b4c3 · verified · 1 Parent(s): 05b7877

Update app.py

Files changed (1)
  1. app.py +92 -17
app.py CHANGED
@@ -2,20 +2,10 @@ import subprocess
 import gradio as gr
 from openai import OpenAI
 import json
-from agno.agent import Agent, RunResponse
-from agno.models.openai.like import OpenAILike
 
 subprocess.Popen("bash /home/user/app/start.sh", shell=True)
 
-agent = Agent(
-    model=OpenAILike(
-        id="model",
-        api_key="no-token",
-        base_url="http://0.0.0.0:8000/v1",
-    ),
-    reasoning=True
-)
-
+client = OpenAI(base_url="http://0.0.0.0:8000/v1", api_key="sk-local", timeout=600)
 
 def handle_function_call(function_name, arguments):
     """Handle function calls from the model"""
@@ -40,6 +30,8 @@ def respond(
     message,
     history: list[tuple[str, str]] = [],
     system_message=None,
+    max_tokens=None,
+    temperature=0.7,
 ):
     messages = []
     if system_message:
@@ -53,15 +45,98 @@ def respond(
 
     messages.append({"role": "user", "content": message})
 
-    output = ""
     try:
+        stream = client.chat.completions.create(
+            model="Deepseek-R1-0528-Qwen3-8B",
+            messages=messages,
+            max_tokens=max_tokens,
+            temperature=temperature,
+            stream=True,
+            tools=[
+                {
+                    "type": "function",
+                    "function": {
+                        "name": "browser_search",
+                        "description": (
+                            "Search the web for a given query and return the most relevant results."
+                        ),
+                        "parameters": {
+                            "type": "object",
+                            "properties": {
+                                "query": {
+                                    "type": "string",
+                                    "description": "The search query string.",
+                                },
+                                "max_results": {
+                                    "type": "integer",
+                                    "description": (
+                                        "Maximum number of search results to return. "
+                                        "If omitted the service will use its default."
+                                    ),
+                                    "default": 5,
+                                },
+                            },
+                            "required": ["query"],
+                        },
+                    },
+                },
+                {
+                    "type": "function",
+                    "function": {
+                        "name": "code_interpreter",
+                        "description": (
+                            "Execute Python code and return the results. "
+                            "Can generate plots, perform calculations, and data analysis."
+                        ),
+                        "parameters": {
+                            "type": "object",
+                            "properties": {
+                                "code": {
+                                    "type": "string",
+                                    "description": "The Python code to execute.",
+                                },
+                            },
+                            "required": ["code"],
+                        },
+                    },
+                },
+            ],
+        )
+
         print("messages", messages)
-        stream = agent.run(messages=messages, stream=True)
+        output = ""
+        reasoning = ""
+        function_calls_to_handle = []
 
         for chunk in stream:
-            print("chunk", chunk)
-            output += chunk.content
-            yield output
+            delta = chunk.choices[0].delta
+
+            if hasattr(delta, "tool_calls") and delta.tool_calls:
+                for tool_call in delta.tool_calls:
+                    if tool_call.function:
+                        function_calls_to_handle.append(
+                            {
+                                "name": tool_call.function.name,
+                                "arguments": json.loads(tool_call.function.arguments),
+                            }
+                        )
+
+            if hasattr(delta, "reasoning_content") and delta.reasoning_content:
+                reasoning += delta.reasoning_content
+            elif delta.content:
+                output += delta.content
+
+            yield f"*{reasoning}*\n{output}"
+
+        if function_calls_to_handle:
+            for func_call in function_calls_to_handle:
+                func_result = handle_function_call(
+                    func_call["name"], func_call["arguments"]
+                )
+                output += (
+                    f"\n\n**Function Result ({func_call['name']}):**\n{func_result}"
+                )
+                yield output
 
     except Exception as e:
         print(f"[Error] {e}")
@@ -71,4 +146,4 @@ def respond(
 demo = gr.ChatInterface(respond)
 
 if __name__ == "__main__":
-    demo.launch(show_api=False)
+    demo.launch(show_api=False)
 
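Note: the diff shows only the signature and docstring of handle_function_call. A minimal dispatcher consistent with the two tool schemas declared in the create() call could look like the sketch below; the duckduckgo_search backend and the exec-based interpreter are illustrative assumptions, not the Space's actual implementation.

import io
import json
import contextlib

def handle_function_call(function_name, arguments):
    """Dispatch a model tool call to a local implementation (illustrative sketch)."""
    if function_name == "browser_search":
        # Hypothetical search backend; any web-search provider would do here.
        from duckduckgo_search import DDGS
        results = DDGS().text(
            arguments["query"], max_results=arguments.get("max_results", 5)
        )
        return json.dumps(results)
    if function_name == "code_interpreter":
        # Capture stdout of exec(); a real deployment would need sandboxing.
        buffer = io.StringIO()
        with contextlib.redirect_stdout(buffer):
            exec(arguments["code"], {})
        return buffer.getvalue() or "(no output)"
    return f"Unknown function: {function_name}"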
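One caveat in the streaming loop above: OpenAI-compatible servers typically stream tool-call arguments in fragments across many deltas, so json.loads(tool_call.function.arguments) on a single chunk can raise on partial JSON. A more defensive pattern, sketched here under that assumption, accumulates fragments per tool-call index and parses only once the stream is exhausted:

# Defensive sketch: accumulate streamed tool-call fragments before parsing.
pending = {}  # tool-call index -> {"name": str, "arguments": str}

for chunk in stream:
    delta = chunk.choices[0].delta
    for tool_call in delta.tool_calls or []:
        slot = pending.setdefault(tool_call.index, {"name": "", "arguments": ""})
        if tool_call.function.name:
            slot["name"] = tool_call.function.name
        if tool_call.function.arguments:
            slot["arguments"] += tool_call.function.arguments

# Only now is each arguments string complete and safe to parse.
function_calls_to_handle = [
    {"name": slot["name"], "arguments": json.loads(slot["arguments"])}
    for slot in pending.values()
]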
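Assuming start.sh brings up an OpenAI-compatible server on port 8000 (the script's contents are not part of this diff), the endpoint can be smoke-tested outside Gradio with the same client settings:

from openai import OpenAI

client = OpenAI(base_url="http://0.0.0.0:8000/v1", api_key="sk-local")
resp = client.chat.completions.create(
    model="Deepseek-R1-0528-Qwen3-8B",
    messages=[{"role": "user", "content": "Say hello."}],
    max_tokens=64,
)
msg = resp.choices[0].message
# Some servers expose chain-of-thought as a separate reasoning_content
# field, which is why app.py guards access with hasattr().
print(getattr(msg, "reasoning_content", None))
print(msg.content)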