Alaaeldin committed
Commit c82482f · verified · 1 Parent(s): ae7a494

Update app.py

Files changed (1):
  app.py  +254 -49
app.py CHANGED
@@ -1,69 +1,274 @@
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
- import datetime
  import requests
- import pytz
- import yaml
- from tools.final_answer import FinalAnswerTool

- from Gradio_UI import GradioUI

- # Below is an example of a tool that does nothing. Amaze us with your creativity !
  @tool
- def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
-     #Keep this format for the description / args / args description but feel free to modify the tool
-     """A tool that does nothing yet
      Args:
-         arg1: the first argument
-         arg2: the second argument
      """
-     return "What magic will you build ?"

  @tool
- def get_current_time_in_timezone(timezone: str) -> str:
-     """A tool that fetches the current local time in a specified timezone.
      Args:
-         timezone: A string representing a valid timezone (e.g., 'America/New_York').
      """
      try:
-         # Create timezone object
-         tz = pytz.timezone(timezone)
-         # Get current time in that timezone
-         local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
-         return f"The current local time in {timezone} is: {local_time}"
      except Exception as e:
-         return f"Error fetching time for timezone '{timezone}': {str(e)}"


- final_answer = FinalAnswerTool()

- # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
- # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

- model = HfApiModel(
-     max_tokens=2096,
-     temperature=0.5,
-     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-     custom_role_conversions=None,
- )


- # Import tool from Hub
- image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

- with open("prompts.yaml", 'r') as stream:
-     prompt_templates = yaml.safe_load(stream)

- agent = CodeAgent(
-     model=model,
-     tools=[final_answer], ## add your tools here (don't remove final answer)
-     max_steps=6,
-     verbosity_level=1,
-     grammar=None,
-     planning_interval=None,
-     name=None,
-     description=None,
-     prompt_templates=prompt_templates
- )
-
-
- GradioUI(agent).launch()

+ from smolagents import tool
  import requests
+ import json
+ import datetime
+ import os
+ import base64
+ from typing import List, Optional, Dict, Any
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ import io
+
+
+ @tool
+ def web_scrape(url: str) -> str:
+     """Scrapes the content from a specified URL.
+
+     Args:
+         url: The URL to scrape content from.
+     """
+     try:
+         response = requests.get(url, headers={
+             'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
+         })
+         response.raise_for_status()
+         return response.text
+     except Exception as e:
+         return f"Error scraping {url}: {str(e)}"
+
+
+ @tool
+ def extract_structured_data(text: str, schema: str) -> str:
+     """Extracts structured data from text based on a provided schema.
+
+     Args:
+         text: The text to extract data from.
+         schema: JSON schema describing the data structure to extract.
+     """
+     try:
+         # In a real implementation, you might use regex, NLP, or ML models
+         # This is a placeholder for demonstrating the concept
+         return f"Extracted structured data according to schema: {schema}"
+     except Exception as e:
+         return f"Error extracting structured data: {str(e)}"
+
+
+ @tool
+ def data_visualization(data: str, chart_type: str, title: str = "Data Visualization") -> str:
+     """Creates a data visualization from structured data.
+
+     Args:
+         data: JSON string or CSV text with the data to visualize.
+         chart_type: Type of chart to create (bar, line, scatter, pie).
+         title: Title for the visualization.
+     """
+     try:
+         # Parse the input data
+         try:
+             # Try parsing as JSON first
+             data_parsed = json.loads(data)
+             df = pd.DataFrame(data_parsed)
+         except:
+             # If not JSON, try as CSV
+             csv_data = io.StringIO(data)
+             df = pd.DataFrame.from_records(pd.read_csv(csv_data))
+
+         # Create appropriate visualization
+         plt.figure(figsize=(10, 6))
+
+         if chart_type.lower() == 'bar':
+             df.plot(kind='bar')
+         elif chart_type.lower() == 'line':
+             df.plot(kind='line')
+         elif chart_type.lower() == 'scatter':
+             # Assuming first two columns are x and y
+             columns = df.columns
+             if len(columns) >= 2:
+                 plt.scatter(df[columns[0]], df[columns[1]])
+             else:
+                 return "Need at least two columns for scatter plot"
+         elif chart_type.lower() == 'pie':
+             # Assuming first column is labels, second is values
+             columns = df.columns
+             if len(columns) >= 2:
+                 plt.pie(df[columns[1]], labels=df[columns[0]], autopct='%1.1f%%')
+             else:
+                 return "Need at least two columns for pie chart"
+         else:
+             return f"Unsupported chart type: {chart_type}"
+
+         plt.title(title)
+
+         # Save to bytes buffer
+         buf = io.BytesIO()
+         plt.savefig(buf, format='png')
+         buf.seek(0)
+
+         # Convert to base64 for embedding in HTML or returning
+         img_str = base64.b64encode(buf.read()).decode('utf-8')
+
+         # Return a reference or small thumbnail
+         return f"Visualization created successfully. Image data (base64): {img_str[:30]}..."
+     except Exception as e:
+         return f"Error creating visualization: {str(e)}"
+
+
+ @tool
+ def code_refactor(code: str, language: str, optimization: str) -> str:
+     """Refactors code based on specified optimization criteria.
+
+     Args:
+         code: The source code to refactor.
+         language: Programming language of the code.
+         optimization: Type of optimization to perform (performance, readability, security).
+     """
+     try:
+         # In a real implementation, you'd use language-specific tools or ML models
+         # This is a placeholder for demonstrating the concept
+         if optimization.lower() == 'performance':
+             return f"Code refactored for performance: \n```{language}\n# Performance optimized\n{code}\n```"
+         elif optimization.lower() == 'readability':
+             return f"Code refactored for readability: \n```{language}\n# Readability optimized\n{code}\n```"
+         elif optimization.lower() == 'security':
+             return f"Code refactored for security: \n```{language}\n# Security optimized\n{code}\n```"
+         else:
+             return f"Unsupported optimization type: {optimization}"
+     except Exception as e:
+         return f"Error refactoring code: {str(e)}"


  @tool
+ def api_interaction(endpoint: str, method: str = "GET", params: Optional[str] = None, headers: Optional[str] = None) -> str:
+     """Interacts with an API endpoint.
+
      Args:
+         endpoint: The API endpoint URL.
+         method: HTTP method (GET, POST, PUT, DELETE).
+         params: JSON string of parameters or data to send.
+         headers: JSON string of headers to include.
      """
+     try:
+         # Parse headers and params if provided
+         headers_dict = json.loads(headers) if headers else {}
+
+         if method.upper() == "GET":
+             params_dict = json.loads(params) if params else {}
+             response = requests.get(endpoint, params=params_dict, headers=headers_dict)
+         elif method.upper() == "POST":
+             data_dict = json.loads(params) if params else {}
+             response = requests.post(endpoint, json=data_dict, headers=headers_dict)
+         elif method.upper() == "PUT":
+             data_dict = json.loads(params) if params else {}
+             response = requests.put(endpoint, json=data_dict, headers=headers_dict)
+         elif method.upper() == "DELETE":
+             response = requests.delete(endpoint, headers=headers_dict)
+         else:
+             return f"Unsupported HTTP method: {method}"
+
+         response.raise_for_status()
+
+         # Try to return JSON if possible, otherwise return text
+         try:
+             return json.dumps(response.json(), indent=2)
+         except:
+             return response.text
+     except Exception as e:
+         return f"Error interacting with API {endpoint}: {str(e)}"
+

  @tool
+ def natural_language_query(database_description: str, query: str) -> str:
+     """Translates a natural language query to structured data operations.
+
      Args:
+         database_description: Description of the database schema.
+         query: Natural language query about the data.
      """
      try:
+         # In a real implementation, you'd use NLP to SQL or similar technology
+         # This is a placeholder for demonstrating the concept
+         return f"Query translated and executed. Results for: {query}"
      except Exception as e:
+         return f"Error processing natural language query: {str(e)}"


+ @tool
+ def file_operations(operation: str, file_path: str, content: Optional[str] = None) -> str:
+     """Performs operations on files.
+
+     Args:
+         operation: The operation to perform (read, write, append, list).
+         file_path: Path to the file or directory.
+         content: Content to write or append (only for write/append operations).
+     """
+     try:
+         if operation.lower() == 'read':
+             with open(file_path, 'r') as file:
+                 return file.read()
+         elif operation.lower() == 'write':
+             if content is None:
+                 return "Content must be provided for write operation"
+             with open(file_path, 'w') as file:
+                 file.write(content)
+             return f"Content written to {file_path}"
+         elif operation.lower() == 'append':
+             if content is None:
+                 return "Content must be provided for append operation"
+             with open(file_path, 'a') as file:
+                 file.write(content)
+             return f"Content appended to {file_path}"
+         elif operation.lower() == 'list':
+             if os.path.isdir(file_path):
+                 return str(os.listdir(file_path))
+             else:
+                 return f"{file_path} is not a directory"
+         else:
+             return f"Unsupported file operation: {operation}"
+     except Exception as e:
+         return f"Error performing file operation: {str(e)}"


+ @tool
+ def semantic_search(corpus: str, query: str, top_k: int = 3) -> str:
+     """Performs semantic search on a corpus of text.
+
+     Args:
+         corpus: The text corpus to search within (could be a large text or list of documents).
+         query: The search query.
+         top_k: Number of top results to return.
+     """
+     try:
+         # In a real implementation, you'd use embedding models and vector similarity
+         # This is a placeholder for demonstrating the concept
+         results = [
+             {"text": f"Result {i} for query: {query}", "score": (top_k - i) / top_k}
+             for i in range(1, top_k + 1)
+         ]
+         return json.dumps(results, indent=2)
+     except Exception as e:
+         return f"Error performing semantic search: {str(e)}"


+ @tool
+ def weather_forecast(location: str) -> str:
+     """Fetches weather forecast for a specified location.
+
+     Args:
+         location: The location to get weather forecast for (city name or coordinates).
+     """
+     try:
+         # In a real implementation, you'd connect to a weather API
+         # This is a placeholder for demonstrating the concept
+         return f"Weather forecast for {location}: Sunny with a chance of AI"
+     except Exception as e:
+         return f"Error fetching weather forecast: {str(e)}"

+
+ @tool
+ def task_scheduler(task: str, schedule_time: str, priority: int = 1) -> str:
+     """Schedules a task to be performed at a specified time.

+     Args:
+         task: Description of the task to be scheduled.
+         schedule_time: Time to schedule the task (ISO format).
+         priority: Priority level of the task (1-5, where 1 is highest).
+     """
+     try:
+         # Parse the schedule time
+         schedule_datetime = datetime.datetime.fromisoformat(schedule_time)
+
+         # In a real implementation, you'd connect to a scheduling system
+         # This is a placeholder for demonstrating the concept
+         return f"Task '{task}' scheduled for {schedule_datetime} with priority {priority}"
+     except Exception as e:
+         return f"Error scheduling task: {str(e)}"