# my-model-name / app.py
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# Load the model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("willco-afk/my-model-name")
model = AutoModelForSequenceClassification.from_pretrained("willco-afk/my-model-name")
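# Note: from_pretrained downloads (and caches) the checkpoint from the Hugging Face Hub
# on first run; the returned model is already in eval mode, so it is ready for inference.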
# Function to classify input text
def classify_text(text):
    print("Classifying:", text)  # Debug: log the incoming text
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    predicted_class = logits.argmax(dim=-1).item()  # Index of the highest-scoring class
    return f"Predicted class: {predicted_class}"
# Gradio interface using the default text-in / text-out layout
demo = gr.Interface(fn=classify_text, inputs="text", outputs="text")
demo.launch()