import DeviceDetector from "https://cdn.skypack.dev/device-detector-js@2.2.10";

// Usage: testSupport({client?: string, os?: string}[])
// Client and os are regular expressions.
// See: https://cdn.jsdelivr.net/npm/device-detector-js@2.2.10/README.md
// for legal values for client and os.
testSupport([
  {client: 'Chrome'},
]);

function testSupport(supportedDevices) {
  const deviceDetector = new DeviceDetector();
  const detectedDevice = deviceDetector.parse(navigator.userAgent);

  let isSupported = false;
  for (const device of supportedDevices) {
    if (device.client !== undefined) {
      const re = new RegExp(`^${device.client}$`);
      if (!re.test(detectedDevice.client.name)) {
        continue;
      }
    }
    if (device.os !== undefined) {
      const re = new RegExp(`^${device.os}$`);
      if (!re.test(detectedDevice.os.name)) {
        continue;
      }
    }
    isSupported = true;
    break;
  }
  if (!isSupported) {
    alert(`This demo, running on ${detectedDevice.client.name}/${detectedDevice.os.name}, ` +
        `is not well supported at this time, continue at your own risk.`);
  }
}

const controls = window;
const mpHolistic = window;
const drawingUtils = window;

const config = {
  locateFile: (file) => {
    return `https://cdn.jsdelivr.net/npm/@mediapipe/holistic@` +
        `${mpHolistic.VERSION}/${file}`;
  }
};

// Our input frames will come from here.
const videoElement = document.getElementsByClassName('input_video')[0];
const canvasElement = document.getElementsByClassName('output_canvas')[0];
const controlsElement = document.getElementsByClassName('control-panel')[0];
const canvasCtx = canvasElement.getContext('2d');

// We'll add this to our control panel later, but we'll save it here so we can
// call tick() each time the graph runs.
const fpsControl = new controls.FPS();

// Optimization: Turn off the animated spinner after its hiding animation is done.
const spinner = document.querySelector('.loading');
spinner.ontransitionend = () => {
  spinner.style.display = 'none';
};

function removeElements(landmarks, elements) {
  for (const element of elements) {
    delete landmarks[element];
  }
}

function removeLandmarks(results) {
  if (results.poseLandmarks) {
    // Drop the face (0-10) and wrist/finger (15-22) pose points; the face
    // mesh and hand models draw those regions instead.
    removeElements(
        results.poseLandmarks,
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 16, 17, 18, 19, 20, 21, 22]);
  }
}

function connect(ctx, connectors) {
  const canvas = ctx.canvas;
  for (const connector of connectors) {
    const from = connector[0];
    const to = connector[1];
    if (from && to) {
      if (from.visibility && to.visibility &&
          (from.visibility < 0.1 || to.visibility < 0.1)) {
        continue;
      }
      ctx.beginPath();
      ctx.moveTo(from.x * canvas.width, from.y * canvas.height);
      ctx.lineTo(to.x * canvas.width, to.y * canvas.height);
      ctx.stroke();
    }
  }
}

let activeEffect = 'mask';
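// Note on the compositing used in onResults() below: when segmentation is
// enabled, the mask is drawn first and a translucent fill is composited
// against it. 'source-in' keeps the fill only where mask pixels exist
// (tinting the person for the "Foreground" effect), 'source-out' keeps it
// only where the mask is empty (tinting the background), and
// 'destination-atop' then paints the camera frame into whatever is still
// transparent. Colors such as '#00FF007F' are 8-digit RGBA hex; the trailing
// 7F is roughly 50% alpha.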
function onResults(results) {
  // Hide the spinner.
  document.body.classList.add('loaded');

  // Remove landmarks we don't want to draw.
  removeLandmarks(results);

  // Update the frame rate.
  fpsControl.tick();

  // Draw the overlays.
  canvasCtx.save();
  canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);

  if (results.segmentationMask) {
    canvasCtx.drawImage(
        results.segmentationMask, 0, 0, canvasElement.width,
        canvasElement.height);

    // Only overwrite existing pixels.
    if (activeEffect === 'mask' || activeEffect === 'both') {
      canvasCtx.globalCompositeOperation = 'source-in';
      // This can be a color or a texture or whatever...
      canvasCtx.fillStyle = '#00FF007F';
      canvasCtx.fillRect(0, 0, canvasElement.width, canvasElement.height);
    } else {
      canvasCtx.globalCompositeOperation = 'source-out';
      canvasCtx.fillStyle = '#0000FF7F';
      canvasCtx.fillRect(0, 0, canvasElement.width, canvasElement.height);
    }

    // Only overwrite missing pixels.
    canvasCtx.globalCompositeOperation = 'destination-atop';
    canvasCtx.drawImage(
        results.image, 0, 0, canvasElement.width, canvasElement.height);

    canvasCtx.globalCompositeOperation = 'source-over';
  } else {
    canvasCtx.drawImage(
        results.image, 0, 0, canvasElement.width, canvasElement.height);
  }

  // Connect elbows to hands. Do this first so that the other graphics draw
  // on top of these marks.
  canvasCtx.lineWidth = 5;
  if (results.poseLandmarks) {
    if (results.rightHandLandmarks) {
      canvasCtx.strokeStyle = 'white';
      connect(canvasCtx, [[
        results.poseLandmarks[mpHolistic.POSE_LANDMARKS.RIGHT_ELBOW],
        results.rightHandLandmarks[0]
      ]]);
    }
    if (results.leftHandLandmarks) {
      canvasCtx.strokeStyle = 'white';
      connect(canvasCtx, [[
        results.poseLandmarks[mpHolistic.POSE_LANDMARKS.LEFT_ELBOW],
        results.leftHandLandmarks[0]
      ]]);
    }
  }

  // Pose...
  drawingUtils.drawConnectors(
      canvasCtx, results.poseLandmarks, mpHolistic.POSE_CONNECTIONS,
      {color: 'white'});
  drawingUtils.drawLandmarks(
      canvasCtx,
      Object.values(mpHolistic.POSE_LANDMARKS_LEFT)
          .map(index => results.poseLandmarks[index]),
      {visibilityMin: 0.65, color: 'white', fillColor: 'rgb(255,138,0)'});
  drawingUtils.drawLandmarks(
      canvasCtx,
      Object.values(mpHolistic.POSE_LANDMARKS_RIGHT)
          .map(index => results.poseLandmarks[index]),
      {visibilityMin: 0.65, color: 'white', fillColor: 'rgb(0,217,231)'});

  // Hands...
  drawingUtils.drawConnectors(
      canvasCtx, results.rightHandLandmarks, mpHolistic.HAND_CONNECTIONS,
      {color: 'white'});
  drawingUtils.drawLandmarks(canvasCtx, results.rightHandLandmarks, {
    color: 'white',
    fillColor: 'rgb(0,217,231)',
    lineWidth: 2,
    // Scale each dot by its depth: nearer landmarks (more negative z) get a
    // larger radius.
    radius: (data) => {
      return drawingUtils.lerp(data.from.z, -0.15, .1, 10, 1);
    }
  });
  drawingUtils.drawConnectors(
      canvasCtx, results.leftHandLandmarks, mpHolistic.HAND_CONNECTIONS,
      {color: 'white'});
  drawingUtils.drawLandmarks(canvasCtx, results.leftHandLandmarks, {
    color: 'white',
    fillColor: 'rgb(255,138,0)',
    lineWidth: 2,
    radius: (data) => {
      return drawingUtils.lerp(data.from.z, -0.15, .1, 10, 1);
    }
  });

  // Face...
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_TESSELATION,
      {color: '#C0C0C070', lineWidth: 1});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYE,
      {color: 'rgb(0,217,231)'});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYEBROW,
      {color: 'rgb(0,217,231)'});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYE,
      {color: 'rgb(255,138,0)'});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYEBROW,
      {color: 'rgb(255,138,0)'});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_FACE_OVAL,
      {color: '#E0E0E0', lineWidth: 5});
  drawingUtils.drawConnectors(
      canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LIPS,
      {color: '#E0E0E0', lineWidth: 5});

  canvasCtx.restore();
}

const holistic = new mpHolistic.Holistic(config);
holistic.onResults(onResults);
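// A minimal alternative to the SourcePicker below, sketched here for
// reference: if the page also loads the @mediapipe/camera_utils bundle
// (which exposes a global `Camera`), frames can be pushed into the graph
// straight from the webcam. This is an illustration of the send() API, not
// part of the control-panel flow this demo actually uses.
//
// const camera = new Camera(videoElement, {
//   onFrame: async () => {
//     await holistic.send({image: videoElement});
//   },
//   width: 1280,
//   height: 720,
// });
// camera.start();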
// Present a control panel through which the user can manipulate the solution
// options.
new controls
    .ControlPanel(controlsElement, {
      selfieMode: true,
      modelComplexity: 1,
      smoothLandmarks: true,
      enableSegmentation: false,
      smoothSegmentation: true,
      minDetectionConfidence: 0.5,
      minTrackingConfidence: 0.5,
      effect: 'background',
    })
    .add([
      new controls.StaticText({title: 'MediaPipe Holistic'}),
      fpsControl,
      new controls.Toggle({title: 'Selfie Mode', field: 'selfieMode'}),
      new controls.SourcePicker({
        onSourceChanged: () => {
          // Resets because the pose gives better results when reset between
          // source changes.
          holistic.reset();
        },
        onFrame: async (input, size) => {
          const aspect = size.height / size.width;
          let width, height;
          if (window.innerWidth > window.innerHeight) {
            height = window.innerHeight;
            width = height / aspect;
          } else {
            width = window.innerWidth;
            height = width * aspect;
          }
          canvasElement.width = width;
          canvasElement.height = height;
          await holistic.send({image: input});
        },
      }),
      new controls.Slider({
        title: 'Model Complexity',
        field: 'modelComplexity',
        discrete: ['Lite', 'Full', 'Heavy'],
      }),
      new controls.Toggle({title: 'Smooth Landmarks', field: 'smoothLandmarks'}),
      new controls.Toggle(
          {title: 'Enable Segmentation', field: 'enableSegmentation'}),
      new controls.Toggle(
          {title: 'Smooth Segmentation', field: 'smoothSegmentation'}),
      new controls.Slider({
        title: 'Min Detection Confidence',
        field: 'minDetectionConfidence',
        range: [0, 1],
        step: 0.01
      }),
      new controls.Slider({
        title: 'Min Tracking Confidence',
        field: 'minTrackingConfidence',
        range: [0, 1],
        step: 0.01
      }),
      new controls.Slider({
        title: 'Effect',
        field: 'effect',
        discrete: {'background': 'Background', 'mask': 'Foreground'},
      }),
    ])
    .on(x => {
      const options = x;
      videoElement.classList.toggle('selfie', options.selfieMode);
      activeEffect = x['effect'];
      holistic.setOptions(options);
    });

// Add a window resize listener so the canvas size adjusts as the window
// changes, keeping the video's aspect ratio.
window.addEventListener('resize', () => {
  const aspect = videoElement.videoHeight / videoElement.videoWidth;
  let width, height;
  if (window.innerWidth > window.innerHeight) {
    height = window.innerHeight;
    width = height / aspect;
  } else {
    width = window.innerWidth;
    height = width * aspect;
  }
  canvasElement.width = width;
  canvasElement.height = height;
});
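// Page assumptions (not defined in this file): the markup is expected to
// provide a <video class="input_video">, a <canvas class="output_canvas">,
// a "control-panel" container and a ".loading" spinner element, and to load
// the @mediapipe/control_utils, @mediapipe/drawing_utils and
// @mediapipe/holistic bundles before this script runs, since `controls`,
// `drawingUtils` and `mpHolistic` above are simply aliases for `window`.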