# Tool Section
from langchain.tools import Tool
from huggingface_hub import list_models
import requests
import os
from tavily import TavilyClient
def internet_search_tool(query: str) -> str:
    """Fetches information, news, and other content from the internet."""
    client = TavilyClient(os.getenv("TAVILY_API_KEY"))
    response = client.search(query=query)
    # Tavily returns a dict; stringify it so the return value matches the annotated type.
    return str(response)
search_tool = Tool(
    name="internet_search_tool",
    func=internet_search_tool,
    description="Fetches information, news, and other content from the internet.",
)
def get_weather_info(city: str) -> str:
    """Fetches the current weather information for a specific city."""
    weather_api = "http://api.weatherapi.com/v1/current.json"
    # Pass the city and API key as query parameters so the URL is properly encoded.
    weather_api_response = requests.get(
        weather_api,
        params={"q": city, "key": os.getenv("WEATHER_API_KEY")},
    )
    if weather_api_response.status_code == 200:
        return weather_api_response.text
    return "There is a problem with the weather API at the moment; please try again later."
weather_info_tool = Tool(
    name="get_weather_info",
    func=get_weather_info,
    description="Fetches the current weather information for a specific city.",
)
def get_hub_stats(author: str) -> str:
    """Fetches the most downloaded model from a specific author on the Hugging Face Hub."""
    try:
        # List models from the specified author, sorted by downloads
        models = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
        if models:
            model = models[0]
            return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
        else:
            return f"No models found for author {author}."
    except Exception as e:
        return f"Error fetching models for {author}: {str(e)}"
# Initialize the tool
hub_stats_tool = Tool(
    name="get_hub_stats",
    func=get_hub_stats,
    description="Fetches the most downloaded model from a specific author on the Hugging Face Hub.",
)
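
# Minimal smoke-test sketch: invoke each tool directly with example inputs, assuming
# TAVILY_API_KEY and WEATHER_API_KEY are set in the environment. Each wrapped function
# takes a single argument, so a plain string can be passed as the tool input.
if __name__ == "__main__":
    print(weather_info_tool.invoke("London"))    # current weather for an example city
    print(hub_stats_tool.invoke("facebook"))     # most downloaded model by an example author
    print(search_tool.invoke("latest AI news"))  # Tavily search results as a string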