import gradio as gr
from transformers import TextClassificationPipeline, AutoTokenizer, AutoModelForSequenceClassification

# Load a pre-trained text classification model and its tokenizer
model_name = "KoalaAI/Text-Moderation"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Create a TextClassificationPipeline
pipe = TextClassificationPipeline(model=model, tokenizer=tokenizer)

# Define the classify_text function using the pipeline
def classify_text(text):
    # Score every label (some transformers versions wrap single-string results
    # in an extra list), then map labels to scores in the format gr.Label expects
    predictions = pipe(text, top_k=None)
    if predictions and isinstance(predictions[0], list):
        predictions = predictions[0]
    return {p["label"]: p["score"] for p in predictions}
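
# Illustrative example (values are not from the source): classify_text("you are awesome")
# might return something like {"OK": 0.98, "H": 0.01, ...}; the exact label set
# is defined on the KoalaAI/Text-Moderation model card.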

# Create a Gradio interface
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter text"),
    outputs=gr.Label(num_top_classes=None),  # Show all labels
)

# Launch the Gradio app
iface.launch()
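
# Note: on Hugging Face Spaces the app is served automatically, so a bare launch()
# is enough; when running locally, iface.launch(share=True) would additionally
# create a temporary public link (standard Gradio behavior).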