# mcp/alerts.py
"""
Saved-query alert helper.
• Stores the last 30 PubMed/arXiv links per query.
• Returns a dict of {query: [new_links]} when fresh papers appear.
Implementation is intentionally simple: JSON on disk + orchestrate_search.
"""
import json, asyncio
from pathlib import Path
from typing import List, Dict

from mcp.orchestrator import orchestrate_search

_ALERT_DB = Path("saved_alerts.json")
_MAX_IDS = 30  # keep last N links per query
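
# The on-disk format is a plain JSON object mapping each saved query to
# its most recent result links. A minimal illustrative example (the
# query string and links below are made up, not real entries):
#
# {
#   "crispr base editing": [
#     "https://pubmed.ncbi.nlm.nih.gov/12345678/",
#     "https://arxiv.org/abs/2401.00001"
#   ]
# }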


def _read_db() -> Dict[str, List[str]]:
    """Load the saved query→links map from disk (empty dict if no file yet)."""
    if _ALERT_DB.exists():
        return json.loads(_ALERT_DB.read_text())
    return {}


def _write_db(data: Dict[str, List[str]]) -> None:
    """Persist the query→links map as pretty-printed JSON."""
    _ALERT_DB.write_text(json.dumps(data, indent=2))


async def check_alerts(queries: List[str]) -> Dict[str, List[str]]:
    """
    For each saved query, run a quick orchestrate_search and detect new paper links.
    Returns {query: [fresh_links]} (empty dict if nothing new).
    """
    db = _read_db()
    new_map: Dict[str, List[str]] = {}

    async def _check(q: str):
        res = await orchestrate_search(q)
        links = [p["link"] for p in res["papers"]]
        prev = set(db.get(q, []))
        fresh = [link for link in links if link not in prev]
        if fresh:
            new_map[q] = fresh
        db[q] = links[:_MAX_IDS]  # save trimmed

    # run all queries in parallel
    await asyncio.gather(*[_check(q) for q in queries])
    _write_db(db)
    return new_map
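

# Minimal usage sketch. This assumes orchestrate_search(q) resolves to a
# dict with a "papers" list of {"link": ...} entries, as check_alerts
# expects; the example queries below are placeholders.
if __name__ == "__main__":
    saved_queries = ["crispr base editing", "large language model agents"]
    fresh = asyncio.run(check_alerts(saved_queries))
    if not fresh:
        print("No new papers for any saved query.")
    for query, links in fresh.items():
        print(f"{query}: {len(links)} new paper(s)")
        for link in links:
            print("  ", link)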