Update app.py
app.py CHANGED
@@ -28,7 +28,6 @@ import time
 from transformers import load_tool, Agent
 import torch
 
-
 class ToolLoader:
     def __init__(self, tool_names):
         self.tools = self.load_tools(tool_names)
@@ -41,8 +40,6 @@ class ToolLoader:
                 loaded_tools.append(tool)
             except Exception as e:
                 print(f"Error loading tool '{tool_name}': {e}")
-                # Handle the error as needed, e.g., continue with other tools or take corrective action
-
         return loaded_tools
 
 class CustomHfAgent(Agent):
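For context, the hunks above only show fragments of ToolLoader; the rest of the class is unchanged and not part of this diff. A minimal sketch of what load_tools presumably looks like, reconstructed from the visible lines (the for/try structure and the load_tool call are assumptions):

class ToolLoader:
    def __init__(self, tool_names):
        self.tools = self.load_tools(tool_names)

    def load_tools(self, tool_names):
        loaded_tools = []
        for tool_name in tool_names:
            try:
                # Assumption: each name is resolved via the load_tool imported above
                tool = load_tool(tool_name)
                loaded_tools.append(tool)
            except Exception as e:
                print(f"Error loading tool '{tool_name}': {e}")
        return loaded_tools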
@@ -79,8 +76,19 @@ class CustomHfAgent(Agent):
             return result[: -len(stop_seq)]
         return result
 
-def
-
+def handle_submission(user_message, selected_tools, url_endpoint):
+    agent = CustomHfAgent(
+        url_endpoint=url_endpoint,
+        token=os.environ['HF_token'],
+        additional_tools=selected_tools,
+        input_params={"max_new_tokens": 192},
+    )
+
+    response = agent.run(user_message)
+
+    print("Agent Response\n {}".format(response))
+
+    return response
 
 # Define the tool names to load
 tool_names = [
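The hunk above reintroduces handle_submission higher up in the file, with the endpoint now passed in as url_endpoint rather than hard-coded (the old hard-coded starcoder URL is removed in the next hunk). A minimal sketch of a direct call, assuming the HF_token environment variable is set; the message text is made up for illustration:

tools = ToolLoader(["Chris4K/sentiment-tool"]).tools
reply = handle_submission(
    "What is the sentiment of: 'I love this Space'?",                  # user_message (example)
    tools,                                                             # selected_tools
    "https://api-inference.huggingface.co/models/bigcode/starcoder",   # url_endpoint
)
print(reply)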
@@ -88,44 +96,37 @@ tool_names = [
     "Chris4K/text-generation-tool",
     "Chris4K/sentiment-tool",
     "Chris4K/token-counter-tool",
-
-
-
-
-
-
-
-
-
-
-
-
+    "Chris4K/most-downloaded-model",
+    "Chris4K/rag-tool",
+    "Chris4K/word-counter-tool",
+    "Chris4K/sentence-counter-tool",
+    "Chris4K/EmojifyTextTool",
+    "Chris4K/NamedEntityRecognitionTool",
+    "Chris4K/TextDownloadTool",
+    "Chris4K/source-code-retriever-tool",
+    "Chris4K/text-to-image",
+    "Chris4K/text-to-video",
+    "Chris4K/image-transformation",
+    "Chris4K/latent-upscaler-tool"
     # More cool tools to come
 ]
 
 # Create tool loader instance
 tool_loader = ToolLoader(tool_names)
 
-# Define the callback function to handle the form submission
-def handle_submission(user_message, selected_tools):
-    agent = CustomHfAgent(
-        url_endpoint="https://api-inference.huggingface.co/models/bigcode/starcoder",
-        token=os.environ['HF_token'],
-        additional_tools=selected_tools,
-        input_params={"max_new_tokens": 192},
-    )
-
-    response = agent.run(user_message)
-
-    print("Agent Response\n {}".format(response))
-
-    return response
-
 st.title("Hugging Face Agent and tools")
 
+# Add a dropdown for selecting the inference URL
+url_endpoint = st.selectbox("Select Inference URL", [
+    "https://api-inference.huggingface.co/models/bigcode/starcoder",
+    "https://api-inference.huggingface.co/models/bigcode/starcode",
+    "https://api-inference.huggingface.co/models/gpt"
+])
+
 tool_checkboxes = [st.checkbox(f"{tool.name} --- {tool.description} ") for tool in tool_loader.tools]
 
 
+
 if "messages" not in st.session_state:
     st.session_state.messages = []
 
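Each entry in tool_names is a Hub repo id that ToolLoader resolves through load_tool at startup, and the new selectbox simply picks which inference endpoint the agent will talk to. As a rough illustration of what happens per entry (the direct call is hypothetical, since each tool defines its own arguments):

from transformers import load_tool

tool = load_tool("Chris4K/sentiment-tool")    # fetches the tool definition from the Hub
print(tool.name, "-", tool.description)       # the same fields the checkboxes display
# result = tool("I really like this app")     # hypothetical call; argument names depend on the tool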
@@ -142,7 +143,9 @@ if user_message := st.chat_input("Enter message"):
     st.session_state.messages.append({"role": "user", "content": user_message})
 
     selected_tools = [tool_loader.tools[idx] for idx, checkbox in enumerate(tool_checkboxes) if checkbox]
-
+    # Handle submission with the selected inference URL
+    response = handle_submission(user_message, selected_tools, url_endpoint)
+
 
     with st.chat_message("assistant"):
         if response is None:
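The hunk above only shows the middle of the chat handler. A hedged sketch of how the enclosing block presumably reads after this change, reconstructed from the visible context lines; everything past `if response is None:` is an assumption, since the diff cuts off there:

if user_message := st.chat_input("Enter message"):
    st.chat_message("user").write(user_message)        # assumption: echo the user turn
    st.session_state.messages.append({"role": "user", "content": user_message})

    selected_tools = [tool_loader.tools[idx] for idx, checkbox in enumerate(tool_checkboxes) if checkbox]
    # Handle submission with the selected inference URL
    response = handle_submission(user_message, selected_tools, url_endpoint)

    with st.chat_message("assistant"):
        if response is None:
            st.warning("The agent returned no response.")   # assumption
        else:
            st.write(response)                               # assumption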