xtlyxt committed (verified)
Commit: b0b6588 · Parent(s): 7668f47

Update app.py

Files changed (1):
  1. app.py +37 -28
app.py CHANGED
@@ -1,13 +1,12 @@
  import streamlit as st
  from PIL import Image
  from transformers import pipeline
- import torch

  # Create an image classification pipeline with scores
  pipe = pipeline("image-classification", model="trpakov/vit-face-expression", top_k=None)

  # Define emotion labels
- emotion_labels = ["Neutral", "Sad", "Angry", "Surprised", "Happy"]
+ #emotion_labels = ["Neutral", "Sad", "Angry", "Surprised", "Happy"]

  # Streamlit app
  st.title("Emotion Recognition with vit-face-expression")
@@ -16,29 +15,39 @@ st.title("Emotion Recognition with vit-face-expression")
  x = st.slider('Select a value')
  st.write(f"{x} squared is {x * x}")

- # Upload image
- uploaded_image = st.file_uploader("Upload an image", type=["jpg", "png"])
-
- if uploaded_image:
-     image = Image.open(uploaded_image)
-     # Predict emotion using the pipeline
-     results = pipe(image)
-     predicted_class = results[0]["label"]
-     #p_sc =
-     predicted_emotion = predicted_class.split("_")[-1].capitalize()
-
-     st.image(image, caption=f"Predicted emotion: {predicted_emotion}", use_column_width=True)
-
-     # Get the keys of results[0]
-     keys = results[0].keys()
-
-     # Display the keys and values of results[0]
-     st.write("Keys and Values of results[0]:")
-     for key, value in results[0].items():
-         st.write(f"Key: {key}, Value: {value}")
-
-     # Display the keys and values of all results
-     for i, result in enumerate(results):
-         st.write(f"Keys and Values of results[{i}]:")
-         for key, value in result.items():
-             st.write(f"Key: {key}, Value: {value}")
+ # Upload images
+ uploaded_images = st.file_uploader("Upload images", type=["jpg", "png"], accept_multiple_files=True)
+
+ if st.button("Predict Emotions") and uploaded_images:
+     if len(uploaded_images) == 2:
+         # Open the uploaded images
+         images = [Image.open(img) for img in uploaded_images]
+         file_names = [img.name for img in uploaded_images]  # Get file names
+
+         # Predict emotion for each image using the pipeline
+         results = [pipe(image) for image in images]
+
+         # Display images and predicted emotions side by side
+         col1, col2 = st.columns(2)
+         for i in range(2):
+             predicted_class = results[i][0]["label"]
+             predicted_emotion = predicted_class.split("_")[-1].capitalize()
+             col = col1 if i == 0 else col2
+             col.image(images[i], caption=f"Predicted emotion: {predicted_emotion}", use_column_width=True)
+             col.write(f"Emotion Scores: {predicted_emotion}: {results[i][0]['score']:.4f}")
+             col.write(f"Original File Name: {file_names[i]}")  # Display original file name
+     else:
+         # Open the uploaded images
+         images = [Image.open(img) for img in uploaded_images]
+
+         # Predict emotion for each image using the pipeline
+         results = [pipe(image) for image in images]
+
+         # Display images and predicted emotions
+         for i, result in enumerate(results):
+             predicted_class = result[0]["label"]
+             predicted_emotion = predicted_class.split("_")[-1].capitalize()
+             st.image(images[i], caption=f"Predicted emotion: {predicted_emotion}", use_column_width=True)
+             st.write(f"Emotion Scores for #{i+1} Image")
+             st.write(f"{predicted_emotion}: {result[0]['score']:.4f}")
+             st.write(f"Original File Name: {uploaded_images[i].name}")  # Display original file name