#!/usr/bin/env python3
# MedGenesis AI – Streamlit frontend (OpenAI / Gemini)
import os, pathlib, asyncio, re
from pathlib import Path
import streamlit as st
import pandas as pd
import plotly.express as px
from fpdf import FPDF
from streamlit_agraph import agraph
from mcp.orchestrator import orchestrate_search, answer_ai_question
from mcp.workspace import get_workspace, save_query
from mcp.knowledge_graph import build_agraph
from mcp.graph_metrics import build_nx, get_top_hubs, get_density
from mcp.alerts import check_alerts
# ── Streamlit telemetry off ─────────────────────────────────────────
os.environ["STREAMLIT_DATA_DIR"] = "/tmp/.streamlit"
os.environ["XDG_STATE_HOME"] = "/tmp"
os.environ["STREAMLIT_BROWSER_GATHERUSAGESTATS"] = "false"
pathlib.Path("/tmp/.streamlit").mkdir(parents=True, exist_ok=True)
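# NOTE: pointing Streamlit's state/config dirs at /tmp is a common workaround on
# hosted containers (e.g. Hugging Face Spaces), where the default home directory
# is assumed not to be writable; /tmp is assumed writable here.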
ROOT = Path(__file__).parent
LOGO = ROOT / "assets" / "logo.png"
# ── PDF helper ──────────────────────────────────────────────────────
def _latin1(txt: str) -> str:
    """Coerce text to Latin-1, replacing unsupported characters, for FPDF."""
    return txt.encode("latin-1", "replace").decode("latin-1")
def _pdf(papers):
    """Render the paper list as a simple PDF and return it as bytes."""
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Helvetica", size=11)
    pdf.cell(200, 8, _latin1("MedGenesis AI – Results"), ln=True, align="C")
    pdf.ln(3)
    for i, p in enumerate(papers, 1):
        pdf.set_font("Helvetica", "B", 11)
        pdf.multi_cell(0, 7, _latin1(f"{i}. {p['title']}"))
        pdf.set_font("Helvetica", "", 9)
        body = f"{p['authors']}\n{p['summary']}\n{p['link']}\n"
        pdf.multi_cell(0, 6, _latin1(body))
        pdf.ln(1)
    return pdf.output(dest="S").encode("latin-1", "replace")
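
# NOTE: FPDF's built-in core fonts (Helvetica etc.) only cover Latin-1, which is
# why _latin1() swaps unsupported characters for "?" before writing. Typical use
# (a sketch; assumes each paper dict carries title/authors/summary/link keys):
#     pdf_bytes = _pdf(res["papers"])
#     st.download_button("PDF", pdf_bytes, "papers.pdf", "application/pdf")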
# ── Sidebar ────────────────────────────────────────────────────────
def _sidebar_workspace():
    """List saved queries (and their AI summaries) in the sidebar."""
    with st.sidebar:
        st.header("🗂️ Workspace")
        ws = get_workspace()
        if not ws:
            st.info("Run a search then press **Save** to populate this list.")
            return
        for i, item in enumerate(ws, 1):
            with st.expander(f"{i}. {item['query']}"):
                st.write(item["result"]["ai_summary"])
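
# NOTE: get_workspace() is assumed to return a list of dicts shaped like
# {"query": str, "result": <orchestrate_search payload>}; that shape is what the
# expander above and the alert check in render_ui() rely on.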
# ── Main UI ────────────────────────────────────────────────────────
def render_ui():
    """Top-level Streamlit page: search box, result tabs and follow-up QA."""
    st.set_page_config("MedGenesis AI", layout="wide")

    # session state
    st.session_state.setdefault("result", None)
    st.session_state.setdefault("last_query", "")
    st.session_state.setdefault("last_llm", "")
    st.session_state.setdefault("followup", "")
    st.session_state.setdefault("answer", "")

    _sidebar_workspace()

    c1, c2 = st.columns([0.15, 0.85])
    if LOGO.exists():
        c1.image(str(LOGO), width=105)
    c2.markdown("## 🧬 **MedGenesis AI**")
    c2.caption("Multi-source biomedical assistant · OpenAI / Gemini")

    llm = st.radio("LLM engine", ["openai", "gemini"], horizontal=True)
    query = st.text_input("Enter biomedical question",
                          placeholder="e.g. CRISPR glioblastoma therapy")
    # Alerts
    if get_workspace():
        try:
            news = asyncio.run(check_alerts([w["query"] for w in get_workspace()]))
            if news:
                with st.sidebar:
                    st.subheader("🔔 New papers")
                    for q, lnks in news.items():
                        st.write(f"**{q}** – {len(lnks)} new")
        except Exception:
            pass
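
    # NOTE: check_alerts() is assumed to return a mapping of saved query -> list
    # of newly seen paper links; any network or rate-limit error is swallowed on
    # purpose so alert polling can never break the main page.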
    # Run search
    if st.button("Run Search 🚀") and query.strip():
        with st.spinner("Collecting literature & biomedical data …"):
            res = asyncio.run(orchestrate_search(query, llm=llm))
        st.session_state.update(
            result=res, last_query=query, last_llm=llm,
            followup="", answer=""
        )
        st.success(f"Completed with **{res['llm_used'].title()}**")

    res = st.session_state.result
    if not res:
        st.info("Enter a question and press **Run Search 🚀**")
        return
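
    # NOTE: the orchestrate_search payload is consumed below through these keys
    # (inferred from usage rather than a formal schema): papers, umls,
    # drug_safety, ai_summary, genes_rich, expr_atlas, cbio_variants,
    # clinical_trials and llm_used.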
    # ── Tabs ───────────────────────────────────────────────────────
    tabs = st.tabs(
        ["Results", "Genes", "Trials", "Graph", "Metrics", "Visuals"]
    )
    # Results
    with tabs[0]:
        for i, p in enumerate(res["papers"], 1):
            st.markdown(f"**{i}. [{p['title']}]({p['link']})** *{p['authors']}*")
            st.write(p["summary"])

        c1, c2 = st.columns(2)
        c1.download_button("CSV",
                           pd.DataFrame(res["papers"]).to_csv(index=False),
                           "papers.csv", "text/csv")
        c2.download_button("PDF", _pdf(res["papers"]),
                           "papers.pdf", "application/pdf")

        if st.button("💾 Save"):
            save_query(st.session_state.last_query, res)
            st.success("Saved to workspace")

        st.subheader("UMLS concepts")
        for c in res["umls"]:
            if isinstance(c, dict) and c.get("cui"):
                st.write(f"- **{c['name']}** ({c['cui']})")

        st.subheader("OpenFDA safety")
        for d in res["drug_safety"]:
            st.json(d)

        st.subheader("AI summary")
        st.info(res["ai_summary"])
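
    # NOTE: both download buttons in the Results tab pass in-memory payloads;
    # st.download_button accepts either str (the CSV text) or bytes (the PDF).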
    # Genes
    with tabs[1]:
        st.header("Gene / Variant signals")
        if res["genes_rich"]:
            for g in res["genes_rich"]:
                st.write(f"- **{g.get('symbol', g.get('approvedSymbol', '?'))}**"
                         f" – {g.get('summary', '')[:160]}…")
        else:
            st.info("No gene hits (rate-limited or none found).")

        if res["expr_atlas"]:
            st.plotly_chart(px.bar(
                res["expr_atlas"][0].get("expressions", [])[:10],
                x="assayName", y="value", title="Top tissues (Expression Atlas)"
            ))

        if res["cbio_variants"]:
            st.markdown("### cBioPortal cohort variants")
            st.json(res["cbio_variants"][0][:15])
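
    # NOTE: the Expression Atlas record is assumed to carry an "expressions"
    # list of {"assayName": ..., "value": ...} dicts, which is the shape px.bar
    # is given in the Genes tab above.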
    # Trials
    with tabs[2]:
        st.header("Clinical trials")
        if not res["clinical_trials"]:
            st.info("No trials (rate-limited or none found).")
        for t in res["clinical_trials"]:
            st.markdown(f"**{t['NCTId'][0]}** – {t['BriefTitle'][0]}")
            st.write(f"Phase {t.get('Phase', [''])[0]} | "
                     f"Status {t['OverallStatus'][0]}")
    # Graph
    with tabs[3]:
        nodes, edges, cfg = build_agraph(
            res["papers"], res["umls"], res["drug_safety"]
        )
        hl = st.text_input("Highlight node:", key="hl")
        if hl:
            pat = re.compile(re.escape(hl), re.I)
            for n in nodes:
                n.color = "#f1c40f" if pat.search(n.label) else "#d3d3d3"
        agraph(nodes, edges, cfg)
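
    # NOTE: streamlit_agraph Node objects expose plain label/color attributes,
    # so the loop above recolors matches in place before rendering: "#f1c40f"
    # marks hits, "#d3d3d3" greys out everything else.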
    # Metrics
    with tabs[4]:
        G = build_nx([n.__dict__ for n in nodes],
                     [e.__dict__ for e in edges])
        st.metric("Density", f"{get_density(G):.3f}")
        st.markdown("**Top hubs**")
        for nid, sc in get_top_hubs(G):
            lab = next((n.label for n in nodes if n.id == nid), nid)
            st.write(f"- {lab} {sc:.3f}")
    # Visuals
    with tabs[5]:
        years = [p["published"] for p in res["papers"] if p.get("published")]
        if years:
            st.plotly_chart(px.histogram(years, nbins=12,
                                         title="Publication Year"))
    # Follow-up QA
    st.markdown("---")
    st.text_input("Ask follow-up question:",
                  key="followup",
                  placeholder="e.g. Any phase III trials recruiting now?")

    def _on_ask():
        """Answer the follow-up question and stash the reply in session state."""
        q = st.session_state.followup.strip()
        if not q:
            st.warning("Please type a question first.")
            return
        with st.spinner("Querying LLM …"):
            ans = asyncio.run(
                answer_ai_question(q,
                                   context=st.session_state.last_query,
                                   llm=st.session_state.last_llm)
            )
        st.session_state.answer = ans["answer"]

    st.button("Ask AI", on_click=_on_ask)
    if st.session_state.answer:
        st.write(st.session_state.answer)
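
    # NOTE: Streamlit runs on_click callbacks before the next script rerun, so
    # _on_ask() has already written st.session_state.answer by the time the
    # st.write above executes.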
# entry-point
if __name__ == "__main__":
    render_ui()