# kiwi-space / app.py
import streamlit as st
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
# Specify the local paths where the model and tokenizer are saved
model_dir = "path_to_save_model"
tokenizer_dir = "path_to_save_tokenizer"
# Load the model and tokenizer from the local directories
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
tokenizer = AutoTokenizer.from_pretrained(tokenizer_dir)
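
# Put the model in evaluation mode so inference is deterministic (disables dropout).
model.eval()

# Note: the directories above are assumed to have been produced beforehand with
# save_pretrained on the fine-tuned checkpoint, roughly:
#   model.save_pretrained("path_to_save_model")
#   tokenizer.save_pretrained("path_to_save_tokenizer")
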
# Run the classifier on a piece of text and return the predicted class index
# together with the class probabilities.
def predict_sentiment(text):
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
    return torch.argmax(probs, dim=-1).item(), probs
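
# Optional helper (sketch): map the numeric class index to a readable name.
# This assumes the fine-tuned config defines id2label; otherwise Transformers
# falls back to generic names such as "LABEL_0", "LABEL_1", ...
def label_name(index):
    return model.config.id2label.get(index, f"LABEL_{index}")
# Example usage: st.write(f"Prediction: {label_name(label)}")
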
# Streamlit interface
st.title("KIWI Classifier")
st.write("Enter a question or statement to classify:")
user_input = st.text_area("Your input", "")
if st.button("Classify"):
    if user_input:
        label, probabilities = predict_sentiment(user_input)
        st.write(f"Prediction: {label}")
        st.write(f"Probabilities: {probabilities.tolist()}")
    else:
        st.write("Please enter some text to classify.")
# Additional instructions or information
st.write("This application uses a fine-tuned BERT model to classify questions and statements.")