# my-model-name / app.py — Hugging Face Space entry point
# (scrape residue removed: commit e7075f5 "Update app.py" by willco-afk, 964 Bytes)
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# Load the fine-tuned sequence-classification model and its matching tokenizer
# from the Hugging Face Hub (downloaded on first run, cached afterwards).
# Both names are read as module globals by classify_text below.
tokenizer = AutoTokenizer.from_pretrained("willco-afk/my-model-name")
model = AutoModelForSequenceClassification.from_pretrained("willco-afk/my-model-name")
def classify_text(text):
    """Classify *text* with the module-level model.

    Tokenizes the input, runs a single forward pass without gradient
    tracking, and returns the argmax class index formatted as a string.
    """
    encoded = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        output = model(**encoded)
    # Single-example batch, so a flat argmax picks the winning class id.
    class_id = output.logits.argmax().item()
    return f"Predicted class: {class_id}"
# Wire the classifier into a simple Gradio UI: one text box in, one text
# box out. live=True re-runs the prediction as the user types.
text_in = gr.Textbox(label="Enter your text")
text_out = gr.Textbox(label="Prediction")
demo = gr.Interface(
    fn=classify_text,
    inputs=text_in,
    outputs=text_out,
    live=True,
)

# Start the web server for this Space.
demo.launch()