import datetime

import pytz
import requests
import yaml
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool

from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
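
# The @tool decorator exposes a plain Python function to the agent as a tool.
# smolagents builds the tool's schema from the type hints and the docstring's
# Args section, so both need to stay accurate.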
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """Get the current time in a specified timezone.

    Args:
        timezone: A valid IANA timezone name (e.g., 'America/New_York').
    """
    try:
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"Current time in {timezone}: {local_time}"
    except Exception as e:
        return f"Error: {str(e)}"
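
# Quick manual check of the tool outside the agent loop (uncomment to try):
# print(get_current_time_in_timezone("Europe/Paris"))

# FinalAnswerTool is the tool the agent calls to return its final result;
# it is passed explicitly in the agent's tool list below.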
|
final_answer = FinalAnswerTool()
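
# Primary model, served through the Hugging Face Inference API. max_tokens caps
# each completion at 1024 tokens; the low temperature (0.3) keeps code
# generation relatively deterministic.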
|
model = HfApiModel(
    max_tokens=1024,
    temperature=0.3,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)
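
# Fallback model: HfApiModel also accepts a dedicated Inference Endpoint URL as
# model_id, which is what the URL below points at. launch_with_optimizations()
# swaps this in if launching with the primary model fails.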
|
fallback_model = HfApiModel(
    max_tokens=1024,
    temperature=0.3,
    model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
    custom_role_conversions=None,
)
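
# Fallback prompt templates, used only when prompts.yaml is not found. The
# "system"/"user" keys and the {task}/{tools} placeholders are assumptions about
# the template structure expected downstream; adjust them to match your setup.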
|
optimized_prompts = {
    "system": """You are a helpful coding assistant. Be concise and direct.
Focus on the specific task. Avoid unnecessary explanations unless requested.""",
    "user": """Task: {task}

Available tools: {tools}

Provide a clear, step-by-step solution.""",
}
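
# Prefer the templates shipped in prompts.yaml; if the file exposes a top-level
# 'system' entry, trim it to 500 characters to cut prompt tokens per step.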
|
try:
    with open("prompts.yaml", 'r') as stream:
        prompt_templates = yaml.safe_load(stream)

    if 'system' in prompt_templates:
        prompt_templates['system'] = prompt_templates['system'][:500]
except FileNotFoundError:
    prompt_templates = optimized_prompts
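
# Agent configuration tuned for speed: max_steps=3 bounds the reasoning loop,
# planning_interval=2 triggers a planning step every other step, and
# verbosity_level=0 silences intermediate logs.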
|
agent = CodeAgent(
    model=model,
    tools=[final_answer, get_current_time_in_timezone],
    max_steps=3,
    verbosity_level=0,
    grammar=None,
    planning_interval=2,
    name="OptimizedAgent",
    description="Fast, efficient coding assistant",
    prompt_templates=prompt_templates,
)
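
# One-off run without the Gradio UI (uncomment to try):
# print(agent.run("What time is it right now in Tokyo?"))

# Launch the UI with the primary model; fall back to the endpoint-backed model
# if that launch raises.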
|
def launch_with_optimizations():
    try:
        GradioUI(agent).launch(
            share=False,
            server_name="127.0.0.1",
            server_port=7860,
            max_threads=4
        )
    except Exception as e:
        print(f"Launch with primary model failed: {e}")
        print("Switching to fallback model...")
        agent.model = fallback_model
        GradioUI(agent).launch()
|
if __name__ == "__main__":
    launch_with_optimizations()