Commit 91c0c5a (verified) · Techno-1 committed · 1 parent: f1b5974

Looked through mcp_client.py in the smolagents library and adjusted the code


Used the fully managed context manager instead of the manual try/finally block

Changed example prompt to ask about the weather (which should hopefully call our MCP server)
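
In other words, the change swaps manual lifecycle handling for the context manager that MCPClient exposes. A minimal sketch of the before/after shape (server_config stands in for the {"url": ..., "transport": ...} dict used in the diff below; get_tools() and close() are the calls the old code made by hand):

# Before: manage the client lifecycle by hand
try:
    mcp_client = MCPClient(server_config)   # server_config: the Space's /gradio_api/mcp/sse endpoint
    tools = mcp_client.get_tools()
    ...                                     # build the agent and the Gradio UI
finally:
    mcp_client.close()

# After: the context manager opens the connection, yields the tools, and closes it on exit
with MCPClient(server_config) as tools:
    ...                                     # build the agent and the Gradio UI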

Files changed (1)
  app.py  +14 -18
app.py CHANGED
@@ -6,24 +6,20 @@ from smolagents import CodeAgent, InferenceClientModel
  from smolagents.mcp_client import MCPClient
 
 
- try:
-     mcp_client = MCPClient(
-         # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
-         {"https://techno-1-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
-     )
-     tools = mcp_client.get_tools()
-
-     model = InferenceClientModel()
-     agent = CodeAgent(tools=[*tools], model=model)
-
-     demo = gr.ChatInterface(
-         fn=lambda message, history: str(agent.run(message)),
-         type="messages",
-         examples=["Prime factorization of 68"],
-         title="Agent with MCP Tools",
-         description="This is a simple agent that uses MCP tools to answer questions.",
-     )
-
-     demo.launch()
- finally:
-     mcp_client.close()
+ with MCPClient(
+     # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
+     {"url": "https://techno-1-mcp-sentiment.hf.space/gradio_api/mcp/sse", "transport": "streamable-http"}
+ ) as tools:
+
+     model = InferenceClientModel()
+     agent = CodeAgent(tools=[*tools], model=model)
+
+     demo = gr.ChatInterface(
+         fn=lambda message, history: str(agent.run(message)),
+         type="messages",
+         examples=["What's the weather like?"],
+         title="Agent with MCP Tools",
+         description="This is a simple agent that uses MCP tools to answer questions.",
+     )
+
+     demo.launch()
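
For context, after this commit app.py as a whole should read roughly as below. This is a sketch assembled from the hunk above: the smolagents imports come from the hunk header, and the import gradio as gr line is assumed to sit in the untouched lines before line 6, since gr.ChatInterface is used.

import gradio as gr

from smolagents import CodeAgent, InferenceClientModel
from smolagents.mcp_client import MCPClient


# Connect to the sentiment Space's MCP endpoint; the context manager
# yields the server's tools and closes the connection when the block exits.
with MCPClient(
    # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
    {"url": "https://techno-1-mcp-sentiment.hf.space/gradio_api/mcp/sse", "transport": "streamable-http"}
) as tools:

    # Hand every MCP tool to a CodeAgent backed by the default Inference API model
    model = InferenceClientModel()
    agent = CodeAgent(tools=[*tools], model=model)

    # Simple chat UI that forwards each message to the agent
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["What's the weather like?"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()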