Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,19 +1,19 @@
|
|
1 |
-
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
2 |
import gradio as gr
|
|
|
|
|
3 |
|
4 |
-
# Load the model and tokenizer
|
5 |
-
|
6 |
-
|
7 |
-
model = AutoModelForSequenceClassification.from_pretrained(model_name)
|
8 |
|
9 |
-
#
|
10 |
-
def
|
11 |
-
inputs = tokenizer(text, return_tensors="pt")
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
return f"Predicted
|
16 |
|
17 |
-
# Gradio
|
18 |
-
demo = gr.Interface(fn=
|
19 |
demo.launch()
|
|
|
|
|
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Hugging Face Hub id of the fine-tuned classifier.
# Hoisted into one constant so the tokenizer and model can never drift apart
# when the id is changed (it was previously repeated in two call sites).
MODEL_NAME = "huggingface_user/my-model-name"  # Use your model name here

# Load the tokenizer and sequence-classification model once at startup,
# so每 request in the Gradio app reuses the same in-memory weights.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
# Function to classify input text
def classify_text(text):
    """Classify *text* with the module-level model and report the class index.

    The raw logits are reduced with argmax, so the returned string names the
    index of the highest-scoring label, not a human-readable label.
    """
    encoded = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    # Inference only — disable autograd bookkeeping for speed and memory.
    with torch.no_grad():
        scores = model(**encoded).logits
    winner = scores.argmax().item()  # Get the predicted class
    return f"Predicted class: {winner}"
# Gradio Interface: one text box in, one predicted-class string out.
demo = gr.Interface(
    fn=classify_text,
    inputs="text",
    outputs="text",
)
demo.launch()