willco-afk committed on
Commit
25ef615
·
verified ·
1 Parent(s): e9af546

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -14
app.py CHANGED
@@ -1,19 +1,19 @@
1
- from transformers import AutoTokenizer, AutoModelForSequenceClassification
2
  import gradio as gr
 
 
3
 
4
- # Load the model and tokenizer from your Hugging Face model repository
5
- model_name = "willco-afk/my-model-name" # Replace with your actual model repo name
6
- tokenizer = AutoTokenizer.from_pretrained(model_name)
7
- model = AutoModelForSequenceClassification.from_pretrained(model_name)
8
 
9
- # Define your prediction function
10
- def predict(text):
11
- inputs = tokenizer(text, return_tensors="pt")
12
- outputs = model(**inputs)
13
- logits = outputs.logits
14
- prediction = logits.argmax(dim=-1).item()
15
- return f"Predicted Class: {prediction}"
16
 
17
- # Gradio UI
18
- demo = gr.Interface(fn=predict, inputs="text", outputs="text")
19
  demo.launch()
 
 
1
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Hugging Face Hub repo to load. Defined once so the tokenizer and the model
# can never point at different repos. Use your model name here.
MODEL_NAME = "huggingface_user/my-model-name"

# Load the tokenizer and sequence-classification model from the Hub
# (weights are downloaded and cached on first run).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
 
8
 
9
+ # Function to classify input text
10
+ def classify_text(text):
11
+ inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
12
+ with torch.no_grad():
13
+ logits = model(**inputs).logits
14
+ predicted_class = logits.argmax().item() # Get the predicted class
15
+ return f"Predicted class: {predicted_class}"
16
 
17
# Gradio UI: a single textbox in, the prediction string out.
# `demo` is kept at module level (conventional name for Gradio apps).
demo = gr.Interface(
    fn=classify_text,
    inputs="text",
    outputs="text",
)
demo.launch()