# mcq / app.py
# Source: Hugging Face Space by Raghu-Shikari ("Update app.py", commit 9834515, 958 bytes)
import streamlit as st
from transformers import pipeline
# Load the Hugging Face text2text pipeline used for question generation.
# Built once at import time so the model is loaded a single time per process.
# NOTE(review): the "-hl" variant of this model is trained on inputs where the
# answer span is wrapped in <hl> tokens — plain free text may still generate,
# but likely with lower quality; confirm the expected input format.
question_generator = pipeline("text2text-generation", model="valhalla/t5-small-qg-hl")
# Function to generate MCQs
def generate_mcqs(content, num_questions):
    """Generate question candidates from *content* using the HF pipeline.

    Args:
        content: Source text to generate questions from.
        num_questions: Number of question candidates to return.

    Returns:
        A list of dicts as produced by the pipeline, each with a
        ``generated_text`` key.
    """
    # Bug fix: num_return_sequences > 1 requires beam search (or sampling);
    # with the pipeline's default greedy decoding, transformers raises
    # ValueError("num_return_sequences has to be 1 when doing greedy search").
    # Using num_beams >= num_return_sequences yields distinct beam candidates.
    questions = question_generator(
        content,
        max_length=512,
        num_beams=max(int(num_questions), 1),
        num_return_sequences=num_questions,
    )
    return questions
# Streamlit UI: collect the source text and question count, then render
# one generated question per line when the button is pressed.
st.title("MCQ Generator using Hugging Face")
content = st.text_area("Enter the content from which MCQs will be generated:", height=200)
num_questions = st.number_input("Enter the number of MCQs to generate:", min_value=1, max_value=20, value=5, step=1)
if st.button("Generate MCQs"):
    # Bug fix: a whitespace-only text area is truthy, so the original check
    # let blank input through to the model; strip before testing.
    if content.strip():
        mcqs = generate_mcqs(content, num_questions)
        # enumerate from 1 so the displayed label matches the question number
        for idx, mcq in enumerate(mcqs, start=1):
            st.write(f"Q{idx}: {mcq['generated_text']}")
    else:
        st.warning("Please enter the content to generate MCQs.")