Update mcp/arxiv.py (+3 −3)
mcp/arxiv.py — CHANGED: harden fetch_arxiv against feed entries that omit optional fields (authors, published).
@@ -7,17 +7,17 @@ ARXIV_BASE = "http://export.arxiv.org/api/query?search_query="
|
|
async def fetch_arxiv(query: str, max_results: int = 5):
    """Fetch the latest arXiv papers matching *query*.

    Args:
        query: Free-text search string; URL-encoded before being appended
            to the arXiv API query URL.
        max_results: Maximum number of entries to request (default 5).

    Returns:
        A list of dicts with keys "title", "authors", "summary", "link",
        "published", and "source". Fields absent from a feed entry are
        returned as empty strings instead of raising AttributeError.
    """
    encoded_query = quote_plus(query)
    search_url = f"{ARXIV_BASE}{encoded_query}&max_results={max_results}"
    # NOTE(review): feedparser.parse performs a blocking HTTP request, so this
    # coroutine blocks the event loop while fetching — consider offloading to
    # a thread (e.g. asyncio.to_thread) if this runs under real concurrency.
    feed = feedparser.parse(search_url)
    results = []
    for entry in feed.entries:
        # Entries are FeedParserDicts; use .get() for every optional field so
        # sparse entries (no authors, no published date, …) cannot raise
        # AttributeError — the original crashed on entry.authors/published.
        authors = entry.get("authors") or []
        results.append({
            "title": entry.get("title", ""),
            "authors": ", ".join(a.get("name", "") for a in authors),
            "summary": entry.get("summary", ""),
            "link": entry.get("link", ""),
            "published": entry.get("published", ""),
            "source": "arXiv",
        })
    return results
|
|
|
async def fetch_arxiv(query: str, max_results: int = 5):
    """Fetch the latest arXiv papers matching *query*.

    Args:
        query: Free-text search string; URL-encoded before being appended
            to the arXiv API query URL.
        max_results: Maximum number of entries to request (default 5).

    Returns:
        A list of dicts with keys "title", "authors", "summary", "link",
        "published", and "source". Fields absent from a feed entry are
        returned as empty strings instead of raising AttributeError.
    """
    encoded_query = quote_plus(query)
    search_url = f"{ARXIV_BASE}{encoded_query}&max_results={max_results}"
    # NOTE(review): feedparser.parse performs a blocking HTTP request, so this
    # coroutine blocks the event loop while fetching — consider offloading to
    # a thread (e.g. asyncio.to_thread) if this runs under real concurrency.
    feed = feedparser.parse(search_url)
    results = []
    for entry in feed.entries:
        # The earlier fix guarded only "authors" and "published"; "title",
        # "summary", and "link" could still raise AttributeError for sparse
        # entries. Use FeedParserDict.get() uniformly for every optional
        # field (one consistent idiom instead of mixing hasattr and .get).
        authors = entry.get("authors") or []
        results.append({
            "title": entry.get("title", ""),
            "authors": ", ".join(a.get("name", "") for a in authors),
            "summary": entry.get("summary", ""),
            "link": entry.get("link", ""),
            "published": entry.get("published", ""),
            "source": "arXiv",
        })
    return results