# loom / app.py
import gradio as gr
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI()

# Load the moderation classifier once at startup
moderate_pipe = pipeline("text-classification", model="KoalaAI/Text-Moderation")


class TextInput(BaseModel):
    text: str


def moderate(text: str) -> dict:
    # Map each predicted label to its score, e.g. {"OK": 0.98}
    return {r["label"]: r["score"] for r in moderate_pipe(text)}


@app.post("/moderate")
async def moderate_text(input: TextInput):
    return moderate(input.text)


# Gradio interface to expose the model via a Space; it wraps the plain
# `moderate` function because gr.Interface passes in a string, not a TextInput
gr.Interface(fn=moderate, inputs="text", outputs="json").launch()
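
# Example client call (a sketch, not part of the app): once the Space is
# running, the Gradio endpoint above can be queried with `gradio_client`.
# The Space id "greykaizen/loom" is inferred from this repo and may differ.
#
#     from gradio_client import Client
#
#     client = Client("greykaizen/loom")
#     print(client.predict("some text to moderate", api_name="/predict"))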