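"""Hand gesture detection demo: streams webcam video over WebRTC into a Gradio app and overlays MediaPipe Hands landmarks on each frame."""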
import gradio as gr
import cv2
from gradio_webrtc import WebRTC
import mediapipe as mp
import time
import os

# On ZeroGPU Spaces the real `spaces` package is available; otherwise fall
# back to a stand-in whose GPU decorator simply calls the wrapped function.
if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces
else:
    class spaces:
        def GPU(func):
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            return wrapper

        def fake_gpu():
            pass
# Initialize MediaPipe Hands
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(min_detection_confidence=0.3, min_tracking_confidence=0.3)  # lower thresholds for faster processing

# WebRTC configuration
rtc_configuration = {
    "iceServers": [{"urls": "stun:stun.l.google.com:19302"}],
    "iceTransportPolicy": "relay"
}

# Timestamp used to throttle how often frames are processed
last_process_time = time.time()
# Hand detection callback
def detection(image, conf_threshold=0.5):
    """
    Detect hands in a frame with MediaPipe Hands and draw the landmarks.
    `conf_threshold` comes from the UI slider; the detection/tracking
    confidence is fixed at model construction above, so it is currently unused.
    """
    global last_process_time
    current_time = time.time()
    # Only process a frame every 0.1 s to reduce the compute load
    if current_time - last_process_time < 0.1:
        return image  # too soon since the last processed frame; return it unchanged
    last_process_time = current_time

    # Downscale the frame first so detection runs on the smaller image
    image = cv2.resize(image, (640, 480))
    # Convert BGR to RGB (MediaPipe expects RGB input)
    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

    # Run MediaPipe Hands on the frame
    results = hands.process(image_rgb)

    # If hands were detected, draw the hand landmarks
    if results.multi_hand_landmarks:
        for hand_landmarks in results.multi_hand_landmarks:
            mp_drawing.draw_landmarks(
                image, hand_landmarks, mp_hands.HAND_CONNECTIONS
            )

    # Return the annotated frame
    return image
# Gradio UI
css = """.my-group {max-width: 600px !important; max-height: 600px !important;}
.my-column {display: flex !important; justify-content: center !important; align-items: center !important;}"""
with gr.Blocks(css=css) as demo:
    gr.HTML(
        """
        <h1 style='text-align: center'>
        Hand Gesture Detection with MediaPipe (Powered by WebRTC ⚡️)
        </h1>
        """
    )
    gr.HTML(
        """
        <h3 style='text-align: center'>
        <a href='https://mediapipe.dev/'>MediaPipe Hands</a>
        </h3>
        """
    )
    with gr.Column(elem_classes=["my-column"]):
        with gr.Group(elem_classes=["my-group"]):
            image = WebRTC(label="Stream", rtc_configuration=rtc_configuration)
            conf_threshold = gr.Slider(
                label="Confidence Threshold",
                minimum=0.0,
                maximum=1.0,
                step=0.05,
                value=0.5,
            )
    # Simplified stream() call, without a queue argument
    image.stream(fn=detection, inputs=[image, conf_threshold], outputs=[image], time_limit=10)

if __name__ == "__main__":
    demo.launch()