"""
Saved-query alert helper.

• Stores the last 30 PubMed/arXiv links per query.
• Returns a dict of {query: [new_links]} when fresh papers appear.

Implementation is intentionally simple: JSON on disk + orchestrate_search.
"""

import asyncio
import json
from pathlib import Path
from typing import Dict, List

from mcp.orchestrator import orchestrate_search

_ALERT_DB = Path("saved_alerts.json")
_MAX_IDS = 30
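
# Illustrative on-disk shape of saved_alerts.json, per the module docstring
# (the query and URLs below are made-up examples, not real data):
# {
#   "crispr base editing": [
#     "https://pubmed.ncbi.nlm.nih.gov/12345678/",
#     "https://arxiv.org/abs/2401.01234"
#   ]
# }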


def _read_db() -> Dict[str, List[str]]:
    if _ALERT_DB.exists():
        return json.loads(_ALERT_DB.read_text())
    return {}


def _write_db(data: Dict[str, List[str]]) -> None:
    _ALERT_DB.write_text(json.dumps(data, indent=2))


async def check_alerts(queries: List[str]) -> Dict[str, List[str]]:
    """
    For each saved query, run a quick orchestrate_search and detect new paper links.
    Returns {query: [fresh_links]} (empty dict if nothing new).
    """
    db = _read_db()
    new_map: Dict[str, List[str]] = {}

    async def _check(q: str) -> None:
        res = await orchestrate_search(q)
        links = [p["link"] for p in res["papers"]]
        prev = set(db.get(q, []))
        fresh = [link for link in links if link not in prev]
        if fresh:
            new_map[q] = fresh
        if links:  # an empty result shouldn't wipe the stored history
            db[q] = links[:_MAX_IDS]

    await asyncio.gather(*[_check(q) for q in queries])
    _write_db(db)
    return new_map
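

# Minimal usage sketch when run as a script. The queries below are hypothetical
# examples, and orchestrate_search must be configured in your environment:
if __name__ == "__main__":
    demo_queries = ["CRISPR base editing", "sparse mixture of experts"]
    fresh = asyncio.run(check_alerts(demo_queries))
    for query, links in fresh.items():
        print(f"{query}: {len(links)} new paper(s)")
        for link in links:
            print("   ", link)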