# app.py — Gradio demo for the KoalaAI/Text-Moderation classifier.
# Source: Hugging Face Space by DarwinAnim8or, commit b24c622 ("Update app.py", 816 bytes).
import gradio as gr
from transformers import TextClassificationPipeline, AutoTokenizer, AutoModelForSequenceClassification
# Pre-trained text-moderation model hosted on the Hugging Face Hub.
model_name = "KoalaAI/Text-Moderation"

# Download (or load from cache) the tokenizer and classification head,
# then wrap them in a ready-to-call classification pipeline.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)
# Classification callback used by the Gradio interface below.
def classify_text(text):
    """Return the top predicted moderation label for *text*."""
    top_result = pipe(text)[0]
    return top_result["label"]
# Build the Gradio UI.
# Fixes vs. the original:
#  - `gradio.components.Textbox` raised NameError — the module is imported
#    as `gr`, so the bare name `gradio` is never bound.
#  - `gr.outputs.Label` belongs to the legacy pre-3.x API and was removed;
#    the top-level `gr.Label` component is the current equivalent.
iface = gr.Interface(
    fn=classify_text,                           # maps input text -> predicted label
    inputs=gr.Textbox(label="Enter text"),
    outputs=gr.Label(label="Predicted classes"),
)

# Launch the Gradio app (blocks and serves the web UI).
iface.launch()