Phoenix07 committed
Commit 1e4e04e · 1 Parent(s): 5776013

init commit

Files changed (4)
  1. app.py +22 -53
  2. requirements.txt +5 -1
  3. retriever.py +51 -0
  4. tools.py +55 -0
app.py CHANGED
@@ -1,64 +1,33 @@
import gradio as gr
- from huggingface_hub import InferenceClient
+ import random
+ from smolagents import GradioUI, CodeAgent, HfApiModel

- """
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
- """
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+ # Import our custom tools from their modules
+ from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
+ from retriever import load_guest_dataset

+ # Initialize the Hugging Face model
+ model = HfApiModel()

- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
-     messages = [{"role": "system", "content": system_message}]
+ # Initialize the web search tool
+ search_tool = DuckDuckGoSearchTool()

-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
+ # Initialize the weather tool
+ weather_info_tool = WeatherInfoTool()

-     messages.append({"role": "user", "content": message})
+ # Initialize the Hub stats tool
+ hub_stats_tool = HubStatsTool()

-     response = ""
+ # Load the guest dataset and initialize the guest info tool
+ guest_info_tool = load_guest_dataset()

-     for message in client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.content
-
-         response += token
-         yield response
-
-
- """
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
- """
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-         gr.Slider(
-             minimum=0.1,
-             maximum=1.0,
-             value=0.95,
-             step=0.05,
-             label="Top-p (nucleus sampling)",
-         ),
-     ],
+ # Create Alfred with all the tools
+ alfred = CodeAgent(
+     tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
+     model=model,
+     add_base_tools=True,  # Add any additional base tools
+     planning_interval=3   # Enable planning every 3 steps
)

-
if __name__ == "__main__":
-     demo.launch()
+     GradioUI(alfred).launch()
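The new app.py wires the four tools into a smolagents CodeAgent and serves it through GradioUI. A minimal sketch of exercising the agent without the web UI follows, assuming a valid Hugging Face token is available to HfApiModel; the file name and the question are illustrative and not part of this commit:

# check_agent.py (hypothetical): smoke-test the agent assembled in app.py
from app import alfred  # importing app builds the model, the tools, and the CodeAgent

if __name__ == "__main__":
    # CodeAgent.run() executes the task and returns the final answer as text
    print(alfred.run("Tell me about the guest related to Ada Lovelace."))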
requirements.txt CHANGED
@@ -1 +1,5 @@
- huggingface_hub==0.25.2
+ datasets
+ smolagents
+ langchain-community
+ rank_bm25
+ duckduckgo-search
retriever.py ADDED
@@ -0,0 +1,51 @@
+ from smolagents import Tool
+ from langchain_community.retrievers import BM25Retriever
+ from langchain.docstore.document import Document
+ import datasets
+
+
+ class GuestInfoRetrieverTool(Tool):
+     name = "guest_info_retriever"
+     description = "Retrieves detailed information about gala guests based on their name or relation."
+     inputs = {
+         "query": {
+             "type": "string",
+             "description": "The name or relation of the guest you want information about."
+         }
+     }
+     output_type = "string"
+
+     def __init__(self, docs):
+         self.is_initialized = False
+         self.retriever = BM25Retriever.from_documents(docs)
+
+     def forward(self, query: str):
+         results = self.retriever.get_relevant_documents(query)
+         if results:
+             return "\n\n".join([doc.page_content for doc in results[:3]])
+         else:
+             return "No matching guest information found."
+
+
+ def load_guest_dataset():
+     # Load the dataset
+     guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")
+
+     # Convert dataset entries into Document objects
+     docs = [
+         Document(
+             page_content="\n".join([
+                 f"Name: {guest['name']}",
+                 f"Relation: {guest['relation']}",
+                 f"Description: {guest['description']}",
+                 f"Email: {guest['email']}"
+             ]),
+             metadata={"name": guest["name"]}
+         )
+         for guest in guest_dataset
+     ]
+
+     # Return the tool
+     return GuestInfoRetrieverTool(docs)
+
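retriever.py builds a BM25 index over the agents-course/unit3-invitees dataset and exposes it as a smolagents Tool that returns up to three matching guest entries. A small sketch of calling it directly, assuming the dataset download succeeds; the query string is illustrative:

from retriever import load_guest_dataset

guest_info_tool = load_guest_dataset()
# forward() is the method the agent ultimately invokes on the tool
print(guest_info_tool.forward("Ada Lovelace"))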
tools.py ADDED
@@ -0,0 +1,55 @@
+ from smolagents import DuckDuckGoSearchTool
+ from smolagents import Tool
+ import random
+ from huggingface_hub import list_models
+
+
+ # Initialize the DuckDuckGo search tool
+ #search_tool = DuckDuckGoSearchTool()
+
+
+ class WeatherInfoTool(Tool):
+     name = "weather_info"
+     description = "Fetches dummy weather information for a given location."
+     inputs = {
+         "location": {
+             "type": "string",
+             "description": "The location to get weather information for."
+         }
+     }
+     output_type = "string"
+
+     def forward(self, location: str):
+         # Dummy weather data
+         weather_conditions = [
+             {"condition": "Rainy", "temp_c": 15},
+             {"condition": "Clear", "temp_c": 25},
+             {"condition": "Windy", "temp_c": 20}
+         ]
+         # Randomly select a weather condition
+         data = random.choice(weather_conditions)
+         return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"
+
+ class HubStatsTool(Tool):
+     name = "hub_stats"
+     description = "Fetches the most downloaded model from a specific author on the Hugging Face Hub."
+     inputs = {
+         "author": {
+             "type": "string",
+             "description": "The username of the model author/organization to find models from."
+         }
+     }
+     output_type = "string"
+
+     def forward(self, author: str):
+         try:
+             # List models from the specified author, sorted by downloads
+             models = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
+
+             if models:
+                 model = models[0]
+                 return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
+             else:
+                 return f"No models found for author {author}."
+         except Exception as e:
+             return f"Error fetching models for {author}: {str(e)}"