import streamlit as st
from transformers import pipeline

# Load the Hugging Face question-generation model once and cache it across
# Streamlit reruns (st.cache_resource, available in Streamlit >= 1.18)
@st.cache_resource
def load_question_generator():
    return pipeline("text2text-generation", model="valhalla/t5-small-qg-hl")

question_generator = load_question_generator()

# Generate up to num_questions candidate questions from the input content
def generate_mcqs(content, num_questions):
    # num_return_sequences > 1 requires beam search (or sampling); with the
    # pipeline's default greedy decoding it raises an error, so generate with
    # num_questions beams and return the top num_questions sequences.
    questions = question_generator(
        content,
        max_length=512,
        num_beams=num_questions,
        num_return_sequences=num_questions,
    )
    return questions
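
# Note (assumption, not part of the original app): the "-hl" suffix in
# valhalla/t5-small-qg-hl refers to the answer-highlight input format the model
# was trained on, i.e. a "generate question:" prefix with the answer span
# wrapped in <hl> tokens, for example:
#
#   generate question: <hl> 42 <hl> is the answer to life, the universe and everything.
#
# Plain text still produces output, but prompting in that format tends to give
# better-targeted questions. build_hl_prompt below is a hypothetical helper
# sketching that preprocessing; it is not wired into the UI.
def build_hl_prompt(content, answer_span):
    # Highlight the first occurrence of the chosen answer span and add the task prefix
    highlighted = content.replace(answer_span, f"<hl> {answer_span} <hl>", 1)
    return f"generate question: {highlighted}"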

# Streamlit UI
st.title("MCQ Generator using Hugging Face")

content = st.text_area("Enter the content from which MCQs will be generated:", height=200)
num_questions = st.number_input("Enter the number of MCQs to generate:", min_value=1, max_value=20, value=5, step=1)

if st.button("Generate MCQs"):
    if content:
        mcqs = generate_mcqs(content, num_questions)
        for i, mcq in enumerate(mcqs):
            st.write(f"Q{i+1}: {mcq['generated_text']}")
    else:
        st.warning("Please enter the content to generate MCQs.")
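
# To run the app locally (assuming this file is saved as app.py and the
# dependencies are installed):
#   pip install streamlit transformers torch
#   streamlit run app.py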