/**
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import cn from "classnames";
import { memo, ReactNode, RefObject, useEffect, useRef, useState } from "react";
import { useLiveAPIContext } from "../../contexts/LiveAPIContext";
import { UseMediaStreamResult } from "../../hooks/use-media-stream-mux";
import { useScreenCapture } from "../../hooks/use-screen-capture";
import { useWebcam } from "../../hooks/use-webcam";
import { AudioRecorder } from "../../lib/audio-recorder";
import { audioContext } from "../../lib/utils";
import { isIOS } from "../../lib/platform";
import AudioPulse from "../audio-pulse/AudioPulse";
import "./control-tray.scss";

export type ControlTrayProps = {
  videoRef: RefObject<HTMLVideoElement>;
  children?: ReactNode;
  supportsVideo: boolean;
  onVideoStreamChange?: (stream: MediaStream | null) => void;
};

type MediaStreamButtonProps = {
  isStreaming: boolean;
  onIcon: string;
  offIcon: string;
  start: () => Promise<any>;
  stop: () => any;
};

/**
 * button used for triggering webcam or screen-capture
 */
const MediaStreamButton = memo(
  ({ isStreaming, onIcon, offIcon, start, stop }: MediaStreamButtonProps) =>
    isStreaming ? (
      <button className="action-button" onClick={stop}>
        <span className="material-symbols-outlined">{onIcon}</span>
      </button>
    ) : (
      <button className="action-button" onClick={start}>
        <span className="material-symbols-outlined">{offIcon}</span>
      </button>
    ),
);

function ControlTray({
  videoRef,
  children,
  onVideoStreamChange = () => {},
  supportsVideo,
}: ControlTrayProps) {
  const videoStreams = [useWebcam(), useScreenCapture()];
  const [activeVideoStream, setActiveVideoStream] =
    useState<MediaStream | null>(null);
  const [webcam, screenCapture] = videoStreams;
  const [inVolume, setInVolume] = useState(0);
  const [audioRecorder] = useState(() => new AudioRecorder());
  const [muted, setMuted] = useState(false);
  const renderCanvasRef = useRef<HTMLCanvasElement>(null);
  const connectButtonRef = useRef<HTMLButtonElement>(null);
  const [simulatedVolume, setSimulatedVolume] = useState(0);
  const isIOSDevice = isIOS();

  const { client, connected, connect, disconnect, volume } =
    useLiveAPIContext();

  // Safari detection (excludes Chrome and Android user agents)
  const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);

  // Return focus to the connect button whenever we disconnect
  useEffect(() => {
    if (!connected && connectButtonRef.current) {
      connectButtonRef.current.focus();
    }
  }, [connected]);

  // iOS volume simulation: drive a gentle synthetic pulse while connected and
  // unmuted; the CSS volume effect below uses this instead of the real input
  // volume on iOS devices
  useEffect(() => {
    if (isIOSDevice && connected && !muted) {
      const interval = setInterval(() => {
        // Create a smooth pulsing effect
        const pulse = (Math.sin(Date.now() / 500) + 1) / 2; // Values between 0 and 1
        setSimulatedVolume(0.02 + pulse * 0.03); // Small range for subtle effect
      }, 50);
      return () => clearInterval(interval);
    }
  }, [connected, muted, isIOSDevice]);

  // Expose the (real or simulated) input volume as a CSS custom property,
  // clamped to 5-8px, for the mic button's pulse animation
  useEffect(() => {
    document.documentElement.style.setProperty(
      "--volume",
      `${Math.max(5, Math.min((isIOSDevice ?
        simulatedVolume : inVolume) * 200, 8))}px`,
    );
  }, [inVolume, simulatedVolume, isIOSDevice]);

  // Forward recorded PCM audio chunks to the API and track input volume
  // while connected and unmuted
  useEffect(() => {
    const onData = (base64: string) => {
      client.sendRealtimeInput([
        {
          mimeType: "audio/pcm;rate=16000",
          data: base64,
        },
      ]);
    };
    if (connected && !muted && audioRecorder) {
      audioRecorder.on("data", onData).on("volume", setInVolume).start();
    } else {
      audioRecorder.stop();
    }
    return () => {
      audioRecorder.off("data", onData).off("volume", setInVolume);
    };
  }, [connected, client, muted, audioRecorder]);

  // Render the active video stream into a hidden canvas and stream
  // scaled-down JPEG frames to the API while connected
  useEffect(() => {
    if (videoRef.current) {
      videoRef.current.srcObject = activeVideoStream;
    }

    let timeoutId = -1;

    function sendVideoFrame() {
      const video = videoRef.current;
      const canvas = renderCanvasRef.current;

      if (!video || !canvas) {
        return;
      }

      const ctx = canvas.getContext("2d")!;
      canvas.width = video.videoWidth * 0.25;
      canvas.height = video.videoHeight * 0.25;
      if (canvas.width + canvas.height > 0) {
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        const base64 = canvas.toDataURL("image/jpeg", 1.0);
        const data = base64.slice(base64.indexOf(",") + 1, Infinity);
        client.sendRealtimeInput([{ mimeType: "image/jpeg", data }]);
      }
      if (connected) {
        // 1000 / 0.5 = 2000ms, i.e. throttle to 0.5 frames per second
        timeoutId = window.setTimeout(sendVideoFrame, 1000 / 0.5);
      }
    }
    if (connected && activeVideoStream !== null) {
      requestAnimationFrame(sendVideoFrame);
    }
    return () => {
      clearTimeout(timeoutId);
    };
  }, [connected, activeVideoStream, client, videoRef]);

  // handler for swapping from one video-stream to the next
  const changeStreams = (next?: UseMediaStreamResult) => async () => {
    if (next) {
      const mediaStream = await next.start();
      setActiveVideoStream(mediaStream);
      onVideoStreamChange(mediaStream);
    } else {
      setActiveVideoStream(null);
      onVideoStreamChange(null);
    }

    videoStreams.filter((msr) => msr !== next).forEach((msr) => msr.stop());
  };

  return (
    <section className="control-tray">
      <canvas style={{ display: "none" }} ref={renderCanvasRef} />
      <nav className={cn("actions-nav", { disabled: !connected })}>
        <button
          className={cn("action-button mic-button")}
          onClick={() => setMuted(!muted)}
        >
          {!muted ? (
            <span className="material-symbols-outlined filled">mic</span>
          ) : (
            <span className="material-symbols-outlined filled">mic_off</span>
          )}
        </button>

        <div className="action-button no-action outlined">
          <AudioPulse volume={volume} active={connected} hover={false} />
        </div>

        {supportsVideo && (
          <>
            <MediaStreamButton
              isStreaming={screenCapture.isStreaming}
              start={changeStreams(screenCapture)}
              stop={changeStreams()}
              onIcon="cancel_presentation"
              offIcon="present_to_all"
            />
            <MediaStreamButton
              isStreaming={webcam.isStreaming}
              start={changeStreams(webcam)}
              stop={changeStreams()}
              onIcon="videocam_off"
              offIcon="videocam"
            />
          </>
        )}
        {children}
      </nav>

      <div className={cn("connection-container", { connected })}>
        <div className="connection-button-container">
          <button
            ref={connectButtonRef}
            className={cn("action-button connect-toggle", { connected })}
            onClick={connected ? disconnect : connect}
          >
            <span className="material-symbols-outlined filled">
              {connected ? "pause" : "play_arrow"}
            </span>
          </button>
        </div>
        <span className="text-indicator">Streaming</span>
      </div>
    </section>
  );
}

export default memo(ControlTray);