<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Sound Chat Interface</title>
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5/dist/css/bootstrap.min.css" rel="stylesheet">
<!-- Bootstrap Icons -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1/font/bootstrap-icons.css">
<!-- Google Fonts -->
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;500;600;700&display=swap" rel="stylesheet">
<style>
:root {
--primary: #2dd4bf;
--primary-dark: #0d9488;
--accent: #f59e0b;
--accent-light: #fbbf24;
--bg-color: #f0f9ff;
--card-bg: #ffffff;
--dark-text: #0f172a;
--light-text: #f8fafc;
--message-bg-user: #2dd4bf;
--message-bg-assistant: #e2e8f0;
--border-radius: 20px;
--shadow: 0 10px 25px rgba(0, 0, 0, 0.05), 0 0 1px rgba(0, 0, 0, 0.1);
}
body {
background-color: var(--bg-color);
font-family: 'Poppins', 'Segoe UI', sans-serif;
color: var(--dark-text);
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background-image: radial-gradient(circle at 10% 20%, rgba(45, 212, 191, 0.1) 0%, rgba(45, 212, 191, 0.03) 50%);
}
.app-container {
max-width: 900px;
margin: 20px auto;
background: var(--card-bg);
border-radius: var(--border-radius);
box-shadow: var(--shadow);
overflow: hidden;
display: grid;
grid-template-rows: auto 1fr auto;
height: 85vh;
width: 100%;
border: 1px solid rgba(45, 212, 191, 0.1);
}
.app-header {
padding: 24px;
background: linear-gradient(135deg, var(--primary), var(--primary-dark));
color: var(--light-text);
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
display: flex;
align-items: center;
gap: 15px;
position: relative;
overflow: hidden;
}
.app-header::before {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-image: url("data:image/svg+xml,%3Csvg width='100' height='100' viewBox='0 0 100 100' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M11 18c3.866 0 7-3.134 7-7s-3.134-7-7-7-7 3.134-7 7 3.134 7 7 7zm48 25c3.866 0 7-3.134 7-7s-3.134-7-7-7-7 3.134-7 7 3.134 7 7 7zm-43-7c1.657 0 3-1.343 3-3s-1.343-3-3-3-3 1.343-3 3 1.343 3 3 3zm63 31c1.657 0 3-1.343 3-3s-1.343-3-3-3-3 1.343-3 3 1.343 3 3 3zM34 90c1.657 0 3-1.343 3-3s-1.343-3-3-3-3 1.343-3 3 1.343 3 3 3zm56-76c1.657 0 3-1.343 3-3s-1.343-3-3-3-3 1.343-3 3 1.343 3 3 3zM12 86c2.21 0 4-1.79 4-4s-1.79-4-4-4-4 1.79-4 4 1.79 4 4 4zm28-65c2.21 0 4-1.79 4-4s-1.79-4-4-4-4 1.79-4 4 1.79 4 4 4zm23-11c2.76 0 5-2.24 5-5s-2.24-5-5-5-5 2.24-5 5 2.24 5 5 5zm-6 60c2.21 0 4-1.79 4-4s-1.79-4-4-4-4 1.79-4 4 1.79 4 4 4zm29 22c2.76 0 5-2.24 5-5s-2.24-5-5-5-5 2.24-5 5 2.24 5 5 5zM32 63c2.76 0 5-2.24 5-5s-2.24-5-5-5-5 2.24-5 5 2.24 5 5 5zm57-13c2.76 0 5-2.24 5-5s-2.24-5-5-5-5 2.24-5 5 2.24 5 5 5zm-9-21c1.105 0 2-.895 2-2s-.895-2-2-2-2 .895-2 2 .895 2 2 2zM60 91c1.105 0 2-.895 2-2s-.895-2-2-2-2 .895-2 2 .895 2 2 2zM35 41c1.105 0 2-.895 2-2s-.895-2-2-2-2 .895-2 2 .895 2 2 2zM12 60c1.105 0 2-.895 2-2s-.895-2-2-2-2 .895-2 2 .895 2 2 2z' fill='rgba(255,255,255,0.1)' fill-rule='evenodd'/%3E%3C/svg%3E");
opacity: 0.5;
z-index: 0;
}
.app-header h1 {
font-size: 1.6rem;
font-weight: 600;
margin: 0;
position: relative;
z-index: 1;
letter-spacing: 0.5px;
}
.app-title-icon {
font-size: 1.8rem;
display: flex;
align-items: center;
animation: pulse 2s infinite alternate;
position: relative;
z-index: 1;
background: rgba(255, 255, 255, 0.2);
border-radius: 50%;
width: 40px;
height: 40px;
justify-content: center;
}
.conversation-area {
padding: 24px;
overflow-y: auto;
display: flex;
flex-direction: column;
gap: 20px;
background-color: var(--bg-color);
height: 100%;
background-image: linear-gradient(rgba(45, 212, 191, 0.03) 1px, transparent 1px),
linear-gradient(90deg, rgba(45, 212, 191, 0.03) 1px, transparent 1px);
background-size: 20px 20px;
}
.conversation-area::-webkit-scrollbar {
width: 8px;
}
.conversation-area::-webkit-scrollbar-track {
background: transparent;
}
.conversation-area::-webkit-scrollbar-thumb {
background-color: rgba(45, 212, 191, 0.3);
border-radius: 20px;
}
.message-group {
display: flex;
flex-direction: column;
gap: 8px;
max-width: 80%;
}
.message-group.user {
align-self: flex-end;
}
.message {
padding: 16px;
border-radius: 18px;
position: relative;
line-height: 1.5;
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.05);
animation: fadeIn 0.3s ease-out;
transition: transform 0.2s ease;
}
.message:hover {
transform: translateY(-2px);
}
.message.user {
background-color: var(--message-bg-user);
color: var(--light-text);
border-top-right-radius: 4px;
}
.message.assistant {
background-color: var(--message-bg-assistant);
color: var(--dark-text);
border-top-left-radius: 4px;
}
.message-avatar {
width: 36px;
height: 36px;
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
font-size: 14px;
margin-bottom: 6px;
box-shadow: 0 3px 6px rgba(0, 0, 0, 0.1);
}
.user .message-avatar {
background-color: var(--primary-dark);
color: var(--light-text);
align-self: flex-end;
}
.assistant .message-avatar {
background-color: var(--accent);
color: var(--light-text);
}
.controls-area {
padding: 28px;
background-color: white;
border-top: 1px solid rgba(0, 0, 0, 0.05);
display: flex;
justify-content: center;
align-items: center;
position: relative;
}
.controls-area::before {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
height: 4px;
background: linear-gradient(90deg, var(--primary), var(--accent));
opacity: 0.6;
}
.listen-container {
display: flex;
flex-direction: column;
align-items: center;
}
.listen-ball {
width: 110px;
height: 110px;
border-radius: 50%;
background: linear-gradient(135deg, var(--primary), var(--primary-dark));
color: white;
display: flex;
align-items: center;
justify-content: center;
cursor: pointer;
transition: all 0.3s ease;
box-shadow: 0 6px 20px rgba(45, 212, 191, 0.4);
position: relative;
overflow: hidden;
}
.listen-ball::before {
content: '';
position: absolute;
width: 150%;
height: 150%;
background: radial-gradient(circle, rgba(255,255,255,0.2) 0%, rgba(255,255,255,0) 70%);
top: -25%;
left: -25%;
}
.listen-ball.listening {
background: linear-gradient(135deg, var(--accent), var(--accent-light));
animation: pulse 1.5s infinite;
}
.listen-ball.processing {
background: linear-gradient(135deg, var(--accent-light), var(--accent));
animation: rotate 2s linear infinite;
}
.listen-ball i {
font-size: 2.8rem;
filter: drop-shadow(0 2px 4px rgba(0, 0, 0, 0.2));
}
.sound-wave {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
border-radius: 50%;
opacity: 0;
}
.listening .sound-wave {
border: 2px solid rgba(255, 255, 255, 0.5);
animation: wave 2s infinite;
}
.status-badge {
background-color: white;
border-radius: 50px;
padding: 10px 20px;
font-size: 0.9rem;
color: var(--dark-text);
margin-top: 18px;
display: inline-flex;
align-items: center;
gap: 10px;
font-weight: 500;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.08);
border: 1px solid rgba(45, 212, 191, 0.2);
transition: all 0.3s ease;
}
.status-badge:hover {
transform: translateY(-2px);
box-shadow: 0 6px 15px rgba(0, 0, 0, 0.1);
}
.status-badge i {
font-size: 1.1rem;
color: var(--primary);
}
.audio-controls {
display: none;
}
.empty-state {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
height: 100%;
color: #94a3b8;
text-align: center;
padding: 20px;
}
.empty-state i {
font-size: 4rem;
margin-bottom: 20px;
color: var(--primary);
opacity: 0.7;
}
.empty-state h3 {
font-size: 1.5rem;
margin-bottom: 12px;
color: #475569;
font-weight: 600;
}
.empty-state p {
color: #64748b;
font-size: 1.1rem;
max-width: 260px;
line-height: 1.6;
}
.time-stamp {
font-size: 0.75rem;
margin-top: 4px;
opacity: 0.7;
align-self: flex-end;
}
@keyframes pulse {
0% { transform: scale(1); box-shadow: 0 6px 20px rgba(45, 212, 191, 0.4); }
50% { transform: scale(1.05); box-shadow: 0 8px 25px rgba(45, 212, 191, 0.6); }
100% { transform: scale(1); box-shadow: 0 6px 20px rgba(45, 212, 191, 0.4); }
}
@keyframes rotate {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
@keyframes wave {
0% { transform: scale(1); opacity: 0.7; }
100% { transform: scale(1.5); opacity: 0; }
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(10px); }
to { opacity: 1; transform: translateY(0); }
}
@media (max-width: 768px) {
.app-container {
margin: 0;
height: 100vh;
border-radius: 0;
}
.message-group {
max-width: 90%;
}
.listen-ball {
width: 90px;
height: 90px;
}
.listen-ball i {
font-size: 2.2rem;
}
.app-header h1 {
font-size: 1.4rem;
}
.controls-area {
padding: 20px;
}
.status-badge {
padding: 8px 16px;
font-size: 0.85rem;
}
}
@media (min-width: 1200px) {
.app-container {
max-width: 1000px;
}
}
</style>
</head>
<body>
<div class="container-fluid p-0">
<div class="app-container">
<div class="app-header">
<div class="app-title-icon">
<i class="bi bi-soundwave"></i>
</div>
<h1>Sound Chat</h1>
</div>
<div class="conversation-area" id="conversationArea">
<div class="empty-state" id="emptyState">
<i class="bi bi-ear"></i>
<h3>No messages yet</h3>
<p>Tap the sound button below to start listening and chatting.</p>
</div>
<!-- Messages will be added here dynamically -->
</div>
<div class="controls-area">
<div class="listen-container">
<div class="listen-ball" id="listenBall">
<div class="sound-wave"></div>
<div class="sound-wave" style="animation-delay: 0.5s"></div>
<div class="sound-wave" style="animation-delay: 1s"></div>
<i class="bi bi-soundwave"></i>
</div>
<div class="status-badge" id="statusBadge">
<i class="bi bi-info-circle"></i>
<span id="statusMessage">Tap to listen for sound</span>
</div>
</div>
<div class="audio-controls">
<audio id="audioPlayer" controls></audio>
</div>
</div>
</div>
</div>
<!-- Bootstrap JS Bundle -->
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5/dist/js/bootstrap.bundle.min.js"></script>
<script>
// DOM Elements
const listenBall = document.getElementById("listenBall");
const statusMessage = document.getElementById("statusMessage");
const audioPlayer = document.getElementById("audioPlayer");
const conversationArea = document.getElementById("conversationArea");
const emptyState = document.getElementById("emptyState");
const statusBadge = document.getElementById("statusBadge");
// Global variables
let mediaRecorder;
let audioChunks = [];
let audioStream;
let chatHistory = [];
let isListening = false;
let isAutoListening = false;
let silenceDetectionInterval;
let activityDetectionInterval;
let lastAudioLevel = 0;
let silenceCounter = 0;
let activityCounter = 0;
let currentUserGroup = null;
let currentAssistantGroup = null;
let audioContext;
let analyzer;
let isProcessing = false;
// Constants
const SILENCE_THRESHOLD = 15;
const ACTIVITY_THRESHOLD = 20;
const MIN_ACTIVITY_DURATION = 5; // Minimum counts of activity before recording
const MAX_SILENCE_DURATION = 15; // Maximum counts of silence before stopping
const MAX_RECORDING_DURATION = 8000; // Maximum recording duration in ms
const COOLDOWN_PERIOD = 1000; // Cooldown between recordings
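// The detection loops below poll the analyser every 100 ms, so the counters
// translate roughly to ~0.5 s of sustained sound to start recording and
// ~1.5 s of silence to stop it.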
// Functions
function updateStatus(message, icon = "bi-info-circle") {
statusMessage.textContent = message;
statusBadge.querySelector("i").className = `bi ${icon}`;
}
function addMessageToChat(content, sender) {
// Hide empty state if it's visible
if (!emptyState.classList.contains("d-none")) {
emptyState.classList.add("d-none");
}
const now = new Date();
const timeString = now.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' });
// Create new message group or use existing one based on sender
let messageGroup;
if (sender === 'user') {
if (!currentUserGroup || (currentAssistantGroup && currentAssistantGroup.classList.contains("assistant"))) {
currentUserGroup = document.createElement("div");
currentUserGroup.className = "message-group user";
const avatar = document.createElement("div");
avatar.className = "message-avatar";
avatar.innerHTML = "<i class='bi bi-person'></i>";
currentUserGroup.appendChild(avatar);
conversationArea.appendChild(currentUserGroup);
}
messageGroup = currentUserGroup;
currentAssistantGroup = null;
} else {
if (!currentAssistantGroup || (currentUserGroup && currentUserGroup.classList.contains("user"))) {
currentAssistantGroup = document.createElement("div");
currentAssistantGroup.className = "message-group assistant";
const avatar = document.createElement("div");
avatar.className = "message-avatar";
avatar.innerHTML = "<i class='bi bi-robot'></i>";
currentAssistantGroup.appendChild(avatar);
conversationArea.appendChild(currentAssistantGroup);
}
messageGroup = currentAssistantGroup;
currentUserGroup = null;
}
// Create message element
const messageDiv = document.createElement("div");
messageDiv.className = `message ${sender}`;
messageDiv.textContent = content;
const timestamp = document.createElement("div");
timestamp.className = "time-stamp";
timestamp.textContent = timeString;
messageGroup.appendChild(messageDiv);
messageGroup.appendChild(timestamp);
// Scroll to bottom
conversationArea.scrollTop = conversationArea.scrollHeight;
// Add to chat history
chatHistory.push({
role: sender === 'user' ? 'user' : 'assistant',
content: content
});
}
async function setupAudioAnalysis() {
if (audioContext) {
audioContext.close();
}
audioContext = new AudioContext();
const source = audioContext.createMediaStreamSource(audioStream);
analyzer = audioContext.createAnalyser();
analyzer.fftSize = 256;
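// With fftSize = 256 the analyser exposes frequencyBinCount = 128 byte
// magnitudes (0-255); their average is used as a rough loudness estimate.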
source.connect(analyzer);
const bufferLength = analyzer.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
return { analyzer, dataArray, bufferLength };
}
function startContinuousListening() {
if (!audioStream) return;
// Set up audio analysis
setupAudioAnalysis().then(({ analyzer, dataArray, bufferLength }) => {
// Start monitoring audio levels
clearInterval(activityDetectionInterval);
activityDetectionInterval = setInterval(() => {
if (isProcessing) return;
analyzer.getByteFrequencyData(dataArray);
// Calculate average audio level
let sum = 0;
for (let i = 0; i < bufferLength; i++) {
sum += dataArray[i];
}
const avg = sum / bufferLength;
lastAudioLevel = avg;
// Detect significant sound
if (avg > ACTIVITY_THRESHOLD) {
activityCounter++;
silenceCounter = 0;
// If we have enough continuous activity and not already listening, start recording
if (activityCounter >= MIN_ACTIVITY_DURATION && !isListening && !isProcessing) {
startRecording();
}
} else {
activityCounter = 0;
}
}, 100);
});
}
function monitorSilenceDuringRecording() {
if (!audioStream || !isListening) return;
clearInterval(silenceDetectionInterval);
silenceDetectionInterval = setInterval(() => {
if (!isListening) {
clearInterval(silenceDetectionInterval);
return;
}
const dataArray = new Uint8Array(analyzer.frequencyBinCount);
analyzer.getByteFrequencyData(dataArray);
// Calculate average audio level
let sum = 0;
for (let i = 0; i < analyzer.frequencyBinCount; i++) {
sum += dataArray[i];
}
const avg = sum / analyzer.frequencyBinCount;
// If silent, increment counter
if (avg < SILENCE_THRESHOLD) {
silenceCounter++;
if (silenceCounter >= MAX_SILENCE_DURATION) {
stopRecording();
}
} else {
silenceCounter = 0;
}
}, 100);
}
async function startRecording() {
if (isListening || isProcessing) return;
try {
// Reset counters
silenceCounter = 0;
// Start recording
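// Note: "audio/webm" isn't supported by every browser (e.g. some Safari
// versions). A more defensive variant (a sketch, not what this page does)
// could pick a supported container first:
//   const mime = MediaRecorder.isTypeSupported("audio/webm") ? "audio/webm" : "audio/mp4";
//   mediaRecorder = new MediaRecorder(audioStream, { mimeType: mime });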
mediaRecorder = new MediaRecorder(audioStream, { mimeType: "audio/webm" });
audioChunks = [];
mediaRecorder.ondataavailable = event => audioChunks.push(event.data);
mediaRecorder.onstop = async () => {
if (audioChunks.length === 0) {
updateState("idle");
isProcessing = false;
return;
}
isProcessing = true;
updateState("processing");
try {
const audioBlob = new Blob(audioChunks, { type: "audio/webm" });
const wavBlob = await convertWebMToWav(audioBlob);
// Create form data with the audio and chat history
const formData = new FormData();
formData.append("file", wavBlob, "recording.wav");
formData.append("chat_history", JSON.stringify(chatHistory));
// Send to the continuous-chat endpoint using root-relative path
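// The backend is expected to respond with the synthesized reply audio as the
// body and to echo the recognized user text and the LLM reply in the
// X-User-Message / X-LLM-Response headers, which are read below.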
const response = await fetch("/continuous-chat/", {
method: "POST",
body: formData
});
if (response.ok) {
const userMessage = response.headers.get("X-User-Message") || "No user message";
const llmResponse = response.headers.get("X-LLM-Response") || "No response";
// Add messages to chat
addMessageToChat(userMessage, 'user');
addMessageToChat(llmResponse, 'assistant');
// Get audio response and play it
const audioData = await response.blob();
audioPlayer.src = URL.createObjectURL(audioData);
audioPlayer.play();
updateState("idle");
if (isAutoListening) {
updateStatus("Listening for sound", "bi-broadcast");
} else {
updateStatus("Tap to listen", "bi-info-circle");
}
} else {
updateState("idle");
updateStatus("Error processing audio", "bi-exclamation-triangle");
}
} catch (error) {
console.error("Error:", error);
updateState("idle");
updateStatus("Error processing audio", "bi-exclamation-triangle");
}
// Set a cooldown before allowing the next recording
setTimeout(() => {
isProcessing = false;
if (isAutoListening) {
updateStatus("Listening for sound", "bi-broadcast");
}
}, COOLDOWN_PERIOD);
};
mediaRecorder.start();
isListening = true;
updateState("listening");
updateStatus("Recording...", "bi-ear");
// Monitor for silence during recording
monitorSilenceDuringRecording();
// Set max recording duration
setTimeout(() => {
if (mediaRecorder && mediaRecorder.state === "recording") {
stopRecording();
}
}, MAX_RECORDING_DURATION);
} catch (error) {
console.error("Error starting recording:", error);
updateState("idle");
updateStatus("Recording error", "bi-exclamation-triangle");
isListening = false;
isProcessing = false;
}
}
function stopRecording() {
if (!isListening) return;
clearInterval(silenceDetectionInterval);
if (mediaRecorder && mediaRecorder.state === "recording") {
mediaRecorder.stop();
}
isListening = false;
updateStatus("Processing...", "bi-arrow-repeat");
}
function updateState(state) {
listenBall.classList.remove("listening", "processing");
if (state === "listening") {
listenBall.classList.add("listening");
listenBall.innerHTML = `
<div class="sound-wave"></div>
<div class="sound-wave" style="animation-delay: 0.5s"></div>
<div class="sound-wave" style="animation-delay: 1s"></div>
<i class="bi bi-soundwave"></i>
`;
} else if (state === "processing") {
listenBall.classList.add("processing");
listenBall.innerHTML = `<i class="bi bi-arrow-repeat"></i>`;
} else {
listenBall.innerHTML = `<i class="bi bi-soundwave"></i>`;
}
}
async function toggleContinuousListening() {
isAutoListening = !isAutoListening;
if (isAutoListening) {
try {
// Request microphone access if we don't have it
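// (getUserMedia only works in a secure context: HTTPS or localhost)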
if (!audioStream) {
audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
}
updateStatus("Auto-listening active", "bi-broadcast");
startContinuousListening();
} catch (error) {
console.error("Error accessing microphone:", error);
updateStatus("Microphone access denied", "bi-mic-mute");
isAutoListening = false;
}
} else {
// Stop continuous listening
clearInterval(activityDetectionInterval);
clearInterval(silenceDetectionInterval);
updateStatus("Tap to listen", "bi-info-circle");
// If currently recording, stop it
if (isListening) {
stopRecording();
}
}
}
async function manualListening() {
if (isListening || isProcessing) return;
try {
// Request microphone access if we don't have it
if (!audioStream) {
audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
await setupAudioAnalysis();
}
startRecording();
} catch (error) {
console.error("Error accessing microphone:", error);
updateStatus("Microphone access denied", "bi-mic-mute");
}
}
async function convertWebMToWav(blob) {
return new Promise((resolve, reject) => {
try {
const reader = new FileReader();
reader.onload = function () {
const audioContext = new AudioContext();
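// decodeAudioData resamples the decoded audio to this context's sample rate;
// that rate (buffer.sampleRate) is what ends up in the WAV header below.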
audioContext.decodeAudioData(reader.result)
.then(buffer => {
const wavBuffer = audioBufferToWav(buffer);
audioContext.close();
resolve(new Blob([wavBuffer], { type: "audio/wav" }));
})
.catch(error => {
console.error("Error decoding audio data:", error);
audioContext.close();
reject(error);
});
};
reader.onerror = () => reject(reader.error);
reader.readAsArrayBuffer(blob);
} catch (error) {
console.error("Error in convertWebMToWav:", error);
reject(error);
}
});
}
function audioBufferToWav(buffer) {
let numOfChan = buffer.numberOfChannels,
length = buffer.length * numOfChan * 2 + 44,
bufferArray = new ArrayBuffer(length),
view = new DataView(bufferArray),
channels = [],
sampleRate = buffer.sampleRate,
offset = 0,
pos = 0;
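// Write the 44-byte RIFF/WAVE header for uncompressed 16-bit PCM
// (all multi-byte fields little-endian).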
setUint32(0x46464952); // "RIFF"
setUint32(length - 8);
setUint32(0x45564157); // "WAVE"
setUint32(0x20746d66); // "fmt " chunk
setUint32(16); // length = 16
setUint16(1); // PCM (uncompressed)
setUint16(numOfChan);
setUint32(sampleRate);
setUint32(sampleRate * 2 * numOfChan);
setUint16(numOfChan * 2);
setUint16(16); // bits per sample
setUint32(0x61746164); // "data" chunk
setUint32(length - pos - 4);
for (let i = 0; i < buffer.numberOfChannels; i++)
channels.push(buffer.getChannelData(i));
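// Interleave the channels, converting float samples in [-1, 1] to signed 16-bit PCM.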
while (pos < length) {
for (let i = 0; i < numOfChan; i++) {
let sample = Math.max(-1, Math.min(1, channels[i][offset]));
sample = sample < 0 ? sample * 0x8000 : sample * 0x7FFF;
setUint16(sample);
}
offset++;
}
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
return bufferArray;
}
// Event Listeners
listenBall.addEventListener("click", () => {
if (isAutoListening) {
toggleContinuousListening(); // Turn off auto mode
} else {
if (isListening) {
stopRecording(); // Stop manual recording
} else {
manualListening(); // Start manual recording
}
}
});
listenBall.addEventListener("dblclick", toggleContinuousListening);
audioPlayer.addEventListener("ended", () => {
if (isAutoListening && !isProcessing) {
updateStatus("Listening for sound", "bi-broadcast");
}
});
// Initialize
updateStatus("Tap to listen, double-tap for auto mode", "bi-info-circle");
// Cleanup function for page unload
window.addEventListener('beforeunload', () => {
if (audioStream) {
audioStream.getTracks().forEach(track => track.stop());
}
if (audioContext) {
audioContext.close();
}
clearInterval(silenceDetectionInterval);
clearInterval(activityDetectionInterval);
});
</script>
</body>
</html>