import streamlit as st
import torch
from PIL import Image
# from transformers import ViTForImageClassification, ViTImageProcessor
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Setup / run notes:
#   pip install --upgrade pip
#   curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
#   streamlit cache clear
#   streamlit run app.py

# Load the image processor and model once at startup
model_name = "trpakov/vit-face-expression"
image_processor = AutoImageProcessor.from_pretrained(model_name)
model = AutoModelForImageClassification.from_pretrained(model_name)
# The ViT-specific classes also work for this checkpoint:
# model = ViTForImageClassification.from_pretrained(model_name)
# image_processor = ViTImageProcessor.from_pretrained(model_name)

# Streamlit app
st.title("Emotion Recognition with vit-face-expression")

# Slider example
x = st.slider("Select a value")
st.write(f"{x} squared is {x * x}")

# Upload image
uploaded_image = st.file_uploader("Upload an image", type=["jpg", "png"])

if uploaded_image:
    # Convert to RGB so images with an alpha channel match the model's 3-channel input
    image = Image.open(uploaded_image).convert("RGB")
    inputs = image_processor(images=image, return_tensors="pt")
    pixel_values = inputs.pixel_values

    # Predict emotion
    with torch.no_grad():
        outputs = model(pixel_values)
    predicted_class = torch.argmax(outputs.logits, dim=1).item()

    # Read the label names from the model config so their order matches the checkpoint,
    # rather than hardcoding a list that may be in the wrong order
    emotion_labels = [model.config.id2label[i] for i in range(len(model.config.id2label))]
    predicted_emotion = emotion_labels[predicted_class]

    st.image(image, caption=f"Predicted emotion: {predicted_emotion}", use_column_width=True)

    # Display the raw logit score for each category
    st.write("Emotion Scores:")
    for label, score in zip(emotion_labels, outputs.logits[0]):
        st.write(f"{label}: {score:.4f}")
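    # Optional sketch (not in the original script): also report softmax
    # probabilities, which normalize the logits so they sum to 1 and are
    # easier to compare across emotions than raw scores.
    probabilities = torch.softmax(outputs.logits, dim=1)[0]
    st.write("Emotion Probabilities:")
    for label, prob in zip(emotion_labels, probabilities):
        st.write(f"{label}: {prob.item():.2%}")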