Update app.py
app.py CHANGED
@@ -1,4 +1,5 @@
 from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
+from huggingface_hub import InferenceClient
 import datetime
 import requests
 import pytz
@@ -8,15 +9,33 @@ from tools.final_answer import FinalAnswerTool
 from Gradio_UI import GradioUI
 
 # Below is an example of a tool that does nothing. Amaze us with your creativity !
+
+llm = InferenceClient(model='microsoft/phi-4')
+
 @tool
-def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
+def write_image_spec(pointers: str) -> str: # it's important to specify the return type
     #Keep this format for the description / args / args description but feel free to modify the tool
-    """A tool that does nothing yet
+    """A tool that writes an image description for a generative model.
     Args:
-        arg1: the first argument
-        arg2: the second argument
+        pointers: the pointers for generating the description
     """
-    return "What magic will you build ?"
+    messages = [
+        {
+            'role': 'system',
+            'content': 'You are a helpful assistant that expands points into a fully-fledged image description for prompting a generative model.'
+        },
+        {
+            'role': 'user',
+            'content': f'Here are my pointers for the description: {pointers}'
+        }
+    ]
+    output = llm.chat.completions.create(
+        messages=messages,
+        stream=False,
+        max_tokens=1024
+    )
+    return output.choices[0].message.content
+
 
 @tool
 def get_current_time_in_timezone(timezone: str) -> str:
@@ -55,7 +74,7 @@ with open("prompts.yaml", 'r') as stream:
 
 agent = CodeAgent(
     model=model,
-    tools=[final_answer], ## add your tools here (don't remove final answer)
+    tools=[final_answer, write_image_spec], ## add your tools here (don't remove final answer)
     max_steps=6,
     verbosity_level=1,
     grammar=None,
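A quick way to sanity-check the new tool is to call it directly; the sketch below is not part of the commit. It assumes the updated app.py above has been loaded and that the Space can reach 'microsoft/phi-4' through the Hugging Face Inference API; the pointer text is made up for illustration. Because @tool wraps write_image_spec into a callable smolagents Tool and it is now registered in tools=[...], it can be invoked either directly or via agent.run(...).

# Minimal sketch, not part of the commit: exercise write_image_spec directly,
# then let the agent decide to use it. The pointers below are illustrative.
spec = write_image_spec("a lighthouse at dusk, watercolor, soft warm light")
print(spec)

# The agent can now reach the tool as well, since it is listed in tools=[...]:
print(agent.run("Use write_image_spec to draft an image description from these pointers: a lighthouse at dusk, watercolor"))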