mgbam committed on
Commit
ee65d10
·
verified ·
1 Parent(s): b6ee928

Create alerts.py

Files changed (1)
  1. mcp/alerts.py +48 -0
mcp/alerts.py ADDED
@@ -0,0 +1,48 @@
+ # mcp/alerts.py
+ """
+ Saved-query alert helper.
+ • Stores the last 30 PubMed/arXiv links per query.
+ • Returns a dict of {query: [new_links]} when fresh papers appear.
+ Implementation is intentionally simple: JSON on disk + orchestrate_search.
+ """
+
+ import json, asyncio
+ from pathlib import Path
+ from typing import List, Dict
+
+ from mcp.orchestrator import orchestrate_search
+
+ _ALERT_DB = Path("saved_alerts.json")
+ _MAX_IDS = 30  # keep last N links per query
+
+
+ def _read_db() -> Dict[str, List[str]]:
+     if _ALERT_DB.exists():
+         return json.loads(_ALERT_DB.read_text())
+     return {}
+
+
+ def _write_db(data: Dict[str, List[str]]):
+     _ALERT_DB.write_text(json.dumps(data, indent=2))
+
+
+ async def check_alerts(queries: List[str]) -> Dict[str, List[str]]:
+     """
+     For each saved query, run a quick orchestrate_search and detect new paper links.
+     Returns {query: [fresh_links]} (empty dict if nothing new).
+     """
+     db = _read_db()
+     new_map = {}
+
+     async def _check(q: str):
+         res = await orchestrate_search(q)
+         links = [p["link"] for p in res["papers"]]
+         prev = set(db.get(q, []))
+         fresh = [link for link in links if link not in prev]
+         if fresh:
+             new_map[q] = fresh
+         db[q] = links[:_MAX_IDS]  # save trimmed
+     # run in parallel
+     await asyncio.gather(*[_check(q) for q in queries])
+     _write_db(db)
+     return new_map
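
For reference, a minimal usage sketch. It is not part of this commit; it assumes the repo root is on PYTHONPATH so the mcp package is importable, and that orchestrate_search returns a dict whose "papers" list holds entries with a "link" key, as check_alerts above expects:

    # demo_alerts.py — hypothetical caller, not part of this commit
    import asyncio

    from mcp.alerts import check_alerts

    async def main() -> None:
        # On the first run saved_alerts.json does not exist yet,
        # so every returned link counts as fresh.
        fresh = await check_alerts(["CRISPR base editing", "graph neural networks"])
        for query, links in fresh.items():
            print(f"{query}: {len(links)} new paper(s)")
            for link in links:
                print(" ", link)

    if __name__ == "__main__":
        asyncio.run(main())

The on-disk saved_alerts.json is a flat {query: [links]} map capped at _MAX_IDS (30) links per query, so deleting the file simply resets all alerts.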