import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the model and tokenizer from the Hugging Face Hub
model = AutoModelForSequenceClassification.from_pretrained("preetidav/sentomodel")
tokenizer = AutoTokenizer.from_pretrained("preetidav/sentomodel")
model.eval()  # inference only, so disable dropout and other training-time behavior

# Function to predict sentiment
def predict_sentiment(text):
    # Tokenize the input text
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    # Forward pass through the model; no gradients are needed for inference
    with torch.no_grad():
        outputs = model(**inputs)
    # Get the predicted class index (0 or 1 for binary classification)
    prediction = torch.argmax(outputs.logits, dim=1).item()
    # Map prediction to sentiment labels
    return "positive" if prediction == 1 else "negative"

# Set up the Gradio interface
iface = gr.Interface(
    fn=predict_sentiment,                   # Function to call on each input
    inputs=gr.Textbox(label="Input Text"),  # Input field for the text
    outputs=gr.Textbox(label="Sentiment"),  # Output field for the predicted sentiment
    title="Sentiment Analysis Model",       # Title of the app
    description="This model predicts whether a given text has positive or negative sentiment.",
)

# Launch the app
iface.launch()
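
# When running outside Hugging Face Spaces (e.g. locally), a temporary public URL
# can be requested instead:
# iface.launch(share=True)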