# app.py — Question Answering web app (repo: Nlpfinalpj, author: Yereque, first commit badb581)
import streamlit as st
from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline
# Page header: app title plus a short markdown introduction.
st.title("Question Answering Web App")

_INTRO = """
### Powered by Hugging Face and Streamlit
This app uses a pre-trained NLP model from Hugging Face to answer questions based on the text you provide.
Try entering a context and a question to get an answer!
"""
st.write(_INTRO)
# Load the tokenizer and model, cached so Streamlit reruns don't re-download.
@st.cache_resource
def load_model():
    """Fetch and cache the CUAD-fine-tuned RoBERTa tokenizer and QA model.

    Returns:
        tuple: ``(tokenizer, model)`` suitable for a question-answering pipeline.
    """
    tok = AutoTokenizer.from_pretrained("Rakib/roberta-base-on-cuad")
    qa_model = AutoModelForQuestionAnswering.from_pretrained("Rakib/roberta-base-on-cuad")
    return tok, qa_model


tokenizer, model = load_model()
# Build the question-answering pipeline once; @st.cache_resource reuses it
# across Streamlit reruns instead of reconstructing it on every interaction.
@st.cache_resource
def get_qa_pipeline():
    """Return a cached Hugging Face QA pipeline over the loaded model/tokenizer."""
    return pipeline("question-answering", model=model, tokenizer=tokenizer)


qa_pipeline = get_qa_pipeline()
# Collect the passage to search and the question to answer from the user.
context = st.text_area(
    "Enter the context:",
    "Type the paragraph here where the answer will be extracted.",
)
question = st.text_input(
    "Enter the question:",
    "What is being asked here?",
)
# Button to perform question answering.
if st.button("Answer Question"):
    # .strip() so whitespace-only input is treated as empty — the plain
    # truthiness check (`if context and question`) passed blank-looking text.
    if context.strip() and question.strip():
        # Run extractive QA: the pipeline returns a dict with the best span.
        result = qa_pipeline(question=question, context=context)
        answer = result['answer']
        # Display the result.
        st.subheader("Answer")
        st.write(f"**Answer:** {answer}")
    else:
        st.warning("Please enter both context and question!")
# Sidebar: static "About" panel describing the app and the model it uses.
_ABOUT = """
This app demonstrates the use of Hugging Face's NLP models with Streamlit.
It uses the `Rakib/roberta-base-on-cuad` model for question answering tasks.
"""
st.sidebar.title("About")
st.sidebar.info(_ABOUT)