import streamlit as st
from transformers import pipeline
import os

# Load the Hugging Face access token from secrets
access_token = os.getenv("HF_TOKEN")  # Token is securely retrieved from secrets
# Model name
MODEL_NAME = "Alaaeldin/pubmedBERT-demo"

@st.cache_resource  # cache the pipeline so the model is loaded only once
def load_pipeline():
    # "token" replaces the deprecated "use_auth_token" argument
    return pipeline("question-answering", model=MODEL_NAME, tokenizer=MODEL_NAME, token=access_token)

# Load the pipeline
qa_pipeline = load_pipeline()
# Streamlit app UI
st.title("PubMed BERT Q&A App")
st.write("Ask questions based on biomedical content!")

# User inputs
context = st.text_area("Enter the biomedical context (e.g., a PubMed abstract):", height=200)
question = st.text_input("Enter your question:")
# Button to get the answer
if st.button("Get Answer"):
    if context.strip() and question.strip():
        with st.spinner("Finding the answer..."):
            result = qa_pipeline(question=question, context=context)
        st.success(f"Answer: {result['answer']}")
        st.write(f"Confidence: {result['score']:.2f}")
    else:
        st.warning("Please provide both context and a question.")
# Footer
st.markdown("---")
st.markdown("Powered by **PubMed BERT** fine-tuned by [Alaaeldin](https://huggingface.co/Alaaeldin).")