Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,61 +1,27 @@
|
|
1 |
-
import cv2
|
2 |
import streamlit as st
|
3 |
from streamlit_webrtc import webrtc_streamer, WebRtcMode
|
4 |
-
import numpy as np
|
5 |
import av
|
6 |
|
7 |
-
st.title("
|
8 |
-
|
9 |
-
# Define the filter state variable
|
10 |
-
filter = "none"
|
11 |
|
|
|
12 |
def transform(frame: av.VideoFrame):
|
13 |
-
img = frame.to_ndarray(format="bgr24")
|
14 |
-
|
15 |
-
# Apply the selected filter
|
16 |
-
if filter == "blur":
|
17 |
-
img = cv2.GaussianBlur(img, (21, 21), 0)
|
18 |
-
elif filter == "canny":
|
19 |
-
img = cv2.cvtColor(cv2.Canny(img, 100, 200), cv2.COLOR_GRAY2BGR)
|
20 |
-
elif filter == "grayscale":
|
21 |
-
img = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
|
22 |
-
elif filter == "sepia":
|
23 |
-
kernel = np.array([[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]])
|
24 |
-
img = cv2.transform(img, kernel)
|
25 |
-
elif filter == "invert":
|
26 |
-
img = cv2.bitwise_not(img)
|
27 |
-
|
28 |
-
# Debugging: Check if the frame is being processed
|
29 |
-
print(f"Processing frame with filter: {filter}")
|
30 |
|
|
|
31 |
return av.VideoFrame.from_ndarray(img, format="bgr24")
|
32 |
|
33 |
-
# Streamlit buttons to
|
34 |
-
|
35 |
-
|
36 |
-
with col1:
|
37 |
-
if st.button("None"):
|
38 |
-
filter = "none"
|
39 |
-
with col2:
|
40 |
-
if st.button("Blur"):
|
41 |
-
filter = "blur"
|
42 |
-
with col3:
|
43 |
-
if st.button("Grayscale"):
|
44 |
-
filter = "grayscale"
|
45 |
-
with col4:
|
46 |
-
if st.button("Sepia"):
|
47 |
-
filter = "sepia"
|
48 |
-
with col5:
|
49 |
-
if st.button("Canny"):
|
50 |
-
filter = "canny"
|
51 |
-
with col6:
|
52 |
-
if st.button("Invert"):
|
53 |
-
filter = "invert"
|
54 |
|
55 |
# Display the video stream
|
56 |
webrtc_streamer(
|
57 |
key="streamer",
|
58 |
-
video_frame_callback=transform,
|
59 |
-
sendback_audio=False,
|
60 |
-
mode=WebRtcMode.
|
61 |
)
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import streamlit as st
|
2 |
from streamlit_webrtc import webrtc_streamer, WebRtcMode
|
|
|
3 |
import av
|
4 |
|
5 |
+
# Page title rendered at the top of the Streamlit app.
st.title("Webcam Display Streamlit App")
|
|
|
|
|
|
|
6 |
|
7 |
+
def transform(frame: av.VideoFrame):
    """Pass-through video-frame callback for streamlit-webrtc.

    Decodes the incoming frame to a BGR24 NumPy array and immediately
    re-wraps it into a new ``av.VideoFrame`` without touching the pixels,
    so the stream is displayed unmodified.
    """
    # Round-trip through an ndarray (no filtering is applied).
    return av.VideoFrame.from_ndarray(frame.to_ndarray(format="bgr24"), format="bgr24")
|
13 |
|
14 |
+
# Streamlit buttons (optional, to stop the stream or interact further)
|
15 |
+
# Optional UI control: the flag is checked after the streamer is created below.
stop_button_pressed = st.button("Stop")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
|
17 |
# Display the video stream.
# Bug fix: the previous mode, WebRtcMode.RECVONLY, tells the component that the
# browser only RECEIVES media — the client's webcam is never sent to the app, so
# `video_frame_callback` would get no camera frames.  SENDRECV makes the browser
# send its webcam video and receive the (transformed) stream back, which is what
# a "Webcam Display" app needs.
webrtc_streamer(
    key="streamer",  # unique Streamlit widget key for this component instance
    video_frame_callback=transform,  # invoked for every incoming video frame
    sendback_audio=False,  # do not echo an audio track back to the client
    mode=WebRtcMode.SENDRECV,  # browser sends webcam video and receives the processed stream
)
|
24 |
+
|
25 |
+
# If you want a "Stop" button that halts the webcam stream, you can handle this through Streamlit.
# NOTE(review): pressing the button only writes a message on this rerun; it does
# not itself tear down the WebRTC connection — confirm desired behavior.
if stop_button_pressed:
    st.write("Stream stopped.")
|