import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("huggingface_user/my-model-name")  # Replace with your model's Hub repo ID
model = AutoModelForSequenceClassification.from_pretrained("huggingface_user/my-model-name")
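
# An equivalent high-level option (a sketch using the transformers pipeline API;
# it would load its own copy of the model, so it is left commented out here):
# from transformers import pipeline
# classifier = pipeline("text-classification", model="huggingface_user/my-model-name")
# classifier("some input text")  # -> [{"label": ..., "score": ...}]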


# Function to classify input text
def classify_text(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    predicted_class = logits.argmax().item()  # Get the predicted class
    return f"Predicted class: {predicted_class}"


# Gradio Interface
demo = gr.Interface(fn=classify_text, inputs="text", outputs="text")
demo.launch()
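
# Note: on Hugging Face Spaces the bare launch() above is sufficient; when
# running locally, demo.launch(share=True) would additionally create a
# temporary public Gradio link (an optional Gradio feature, not required here).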