liuhui0401 committed
Commit ef38074 · verified · 1 Parent(s): fb79e86

Update app.py

Files changed (1)
  1. app.py +54 -3
app.py CHANGED
@@ -1,5 +1,13 @@
  import gradio as gr
+ import cv2
  from gradio_webrtc import WebRTC
+ import mediapipe as mp
+ import time
+
+ # Initialize MediaPipe Hands
+ mp_hands = mp.solutions.hands
+ mp_drawing = mp.solutions.drawing_utils
+ hands = mp_hands.Hands(min_detection_confidence=0.3, min_tracking_confidence=0.3)  # lower confidence thresholds for speed

  # WebRTC configuration
  rtc_configuration = {
@@ -7,6 +15,41 @@ rtc_configuration = {
      "iceTransportPolicy": "relay"
  }

+ # Gesture detection function
+ last_process_time = time.time()  # used to throttle the processing rate
+
+ def detection(image, conf_threshold=0.5):
+     """
+     Hand gesture detection with MediaPipe Hands.
+     """
+     global last_process_time
+     current_time = time.time()
+
+     # Only process a frame every 0.1 s or so to reduce the compute load
+     if current_time - last_process_time < 0.1:
+         return image  # interval too short, return the frame unchanged
+
+     last_process_time = current_time
+
+     # Convert the frame from BGR to RGB (MediaPipe expects RGB)
+     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+
+     # Downscale the frame to lower the compute cost
+     image = cv2.resize(image, (640, 480))
+
+     # Run MediaPipe Hands on the frame
+     results = hands.process(image_rgb)
+
+     # If hands are detected, draw the hand landmarks
+     if results.multi_hand_landmarks:
+         for hand_landmarks in results.multi_hand_landmarks:
+             mp_drawing.draw_landmarks(
+                 image, hand_landmarks, mp_hands.HAND_CONNECTIONS
+             )
+
+     # Return the annotated frame
+     return image
+
  # Gradio UI
  css = """.my-group {max-width: 600px !important; max-height: 600 !important;}
  .my-column {display: flex !important; justify-content: center !important; align-items: center !important;}"""
@@ -15,22 +58,30 @@ with gr.Blocks(css=css) as demo:
      gr.HTML(
          """
          <h1 style='text-align: center'>
-         WebRTC Stream Test
+         Hand Gesture Detection with MediaPipe (Powered by WebRTC ⚡️)
          </h1>
          """
      )
      gr.HTML(
          """
          <h3 style='text-align: center'>
-         WebRTC without MediaPipe
+         <a href='https://mediapipe.dev/'>MediaPipe Hands</a>
          </h3>
          """
      )
      with gr.Column(elem_classes=["my-column"]):
          with gr.Group(elem_classes=["my-group"]):
              image = WebRTC(label="Stream", rtc_configuration=rtc_configuration)
+             conf_threshold = gr.Slider(
+                 label="Confidence Threshold",
+                 minimum=0.0,
+                 maximum=1.0,
+                 step=0.05,
+                 value=0.5,
+             )

-         image.stream(fn=lambda x: x, inputs=[image], outputs=[image], time_limit=10)
+         # Use queueing (queue=True) and a time limit to keep processing manageable
+         image.stream(fn=detection, inputs=[image, conf_threshold], outputs=[image], time_limit=10, queue=True)

  if __name__ == "__main__":
      demo.launch()
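
A note on the new slider: detection() receives conf_threshold from the Gradio slider but never applies it, since MediaPipe Hands fixes its confidence thresholds when the Hands object is constructed. Below is a minimal sketch of one way the slider could be honoured at runtime, filtering drawn hands by MediaPipe's per-hand handedness score; detection_with_threshold is an illustrative name, not part of this commit.

import cv2
import mediapipe as mp

mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(min_detection_confidence=0.3, min_tracking_confidence=0.3)

def detection_with_threshold(image, conf_threshold=0.5):
    # Illustrative sketch, not the committed code: run MediaPipe Hands on an RGB copy
    results = hands.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
    if results.multi_hand_landmarks:
        for hand_landmarks, handedness in zip(
            results.multi_hand_landmarks, results.multi_handedness
        ):
            # Each detected hand carries a handedness classification score;
            # only draw hands whose score clears the slider value
            if handedness.classification[0].score >= conf_threshold:
                mp_drawing.draw_landmarks(image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
    return image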
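For a quick sanity check of the committed detection() outside the WebRTC stream, it can be run on a single frame. This assumes the Space's dependencies are installed locally; sample.jpg is a placeholder path, not a file in this repo.

import time
import cv2
from app import detection  # imports app.py; demo.launch() stays behind the __main__ guard

frame = cv2.imread("sample.jpg")   # placeholder input image (BGR, as OpenCV loads it)
time.sleep(0.2)                    # let the 0.1 s throttle inside detection() expire
annotated = detection(frame)       # draws hand landmarks when a hand is found
cv2.imwrite("annotated.jpg", annotated)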
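The hunks above only show the tail of rtc_configuration; the iceServers entries the Space actually uses sit outside the diff and are not reproduced here. For orientation, a relay-only configuration generally takes this shape, with every value below a placeholder:

rtc_configuration = {
    "iceServers": [
        {
            "urls": "turn:turn.example.com:3478",  # placeholder TURN server
            "username": "user",                    # placeholder credential
            "credential": "pass",                  # placeholder credential
        }
    ],
    # "relay" forces media through the TURN server rather than attempting direct peer links
    "iceTransportPolicy": "relay",
}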