<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Data-over-Sound Interface</title>
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet">
<!-- Bootstrap Icons -->
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/font/bootstrap-icons.css">
<style>
body {
background-color: #f8f9fa;
padding-top: 30px;
}
.chat-container {
max-width: 700px;
margin: 0 auto;
background-color: white;
border-radius: 12px;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
padding: 25px;
}
.btn-record {
background-color: #ff4b4b;
border-color: #ff4b4b;
}
.btn-record:hover {
background-color: #e43c3c;
border-color: #e43c3c;
}
.btn-record.recording {
animation: pulse 1.5s infinite;
}
.btn-generate {
background-color: #4c6ef5;
border-color: #4c6ef5;
}
.btn-generate:hover {
background-color: #3b5bdb;
border-color: #3b5bdb;
}
.icon-spacing {
margin-right: 8px;
}
.control-label {
font-size: 0.9rem;
color: #6c757d;
margin-bottom: 8px;
}
.audio-container {
background-color: #f1f3f5;
border-radius: 8px;
padding: 15px;
margin-top: 20px;
}
audio {
width: 100%;
}
.status-indicator {
font-size: 0.9rem;
margin-top: 10px;
height: 24px;
}
.response-container {
background-color: #f8f9fa;
border-radius: 8px;
padding: 15px;
margin-top: 20px;
border: 1px solid #dee2e6;
}
.header-info {
font-family: monospace;
padding: 10px;
background-color: #e9ecef;
border-radius: 6px;
margin-bottom: 15px;
font-size: 0.9rem;
}
.text-input {
margin-top: 15px;
}
@keyframes pulse {
0% { transform: scale(1); }
50% { transform: scale(1.05); }
100% { transform: scale(1); }
}
</style>
</head>
<body>
<div class="container">
<div class="chat-container">
<h1 class="text-center mb-4">
<i class="bi bi-soundwave text-primary"></i> Data-over-Sound
</h1>
<div class="text-input mb-4">
<label for="messageInput" class="form-label">Message to Encode:</label>
<textarea id="messageInput" class="form-control" rows="3" placeholder="Enter text to encode as sound..."></textarea>
</div>
<div class="row g-4">
<div class="col-md-6">
<div class="d-grid">
<p class="control-label text-center">Listen for Data</p>
<button id="recordButton" class="btn btn-record btn-lg text-white">
<i class="bi bi-mic-fill icon-spacing"></i> Start Listening
</button>
</div>
</div>
<div class="col-md-6">
<div class="d-grid">
<p class="control-label text-center">Transmit Data</p>
<button id="generateButton" class="btn btn-generate btn-lg text-white">
<i class="bi bi-broadcast icon-spacing"></i> Transmit
</button>
</div>
</div>
</div>
<div class="audio-container">
<p class="control-label mb-2">Audio Control</p>
<audio id="audioPlayer" controls></audio>
<div id="statusMessage" class="status-indicator text-center text-secondary"></div>
</div>
<div class="response-container">
<h5><i class="bi bi-arrow-repeat icon-spacing"></i>Communication Headers</h5>
<div class="header-info">
<div id="userMessageHeader">X-User-Message: <span class="text-primary">Waiting for data...</span></div>
<div id="llmResponseHeader">X-LLM-Response: <span class="text-success">Waiting for response...</span></div>
</div>
<h5><i class="bi bi-reception-4 icon-spacing"></i>Data Received</h5>
<div id="receivedData" class="p-3 border rounded bg-white">
No data received yet.
</div>
</div>
</div>
</div>
<!-- Bootstrap JS Bundle -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"></script>
<script>
let mediaRecorder;
let audioChunks = [];
let audioStream;
const recordButton = document.getElementById("recordButton");
const generateButton = document.getElementById("generateButton");
const statusMessage = document.getElementById("statusMessage");
const messageInput = document.getElementById("messageInput");
const userMessageHeader = document.getElementById("userMessageHeader");
const llmResponseHeader = document.getElementById("llmResponseHeader");
const receivedData = document.getElementById("receivedData");
let recordingTimeout;
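// Receive path (a sketch of the expected flow; the backend contract is assumed from the
// headers and blob handling below): record microphone audio with MediaRecorder, convert the
// WebM capture to 16-bit PCM WAV in the browser, POST it to /chat/, then read the decoded
// message from the X-User-Message header and the reply from X-LLM-Response, and play the
// returned audio blob (presumably the reply re-encoded as a data-over-sound transmission).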
recordButton.addEventListener("click", async () => {
if (!mediaRecorder || mediaRecorder.state === "inactive") {
try {
// Stop any existing stream tracks before requesting a new one
if (audioStream) {
audioStream.getTracks().forEach(track => track.stop());
}
audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
// Prefer WebM; fall back to the browser default when it is not supported (e.g. Safari)
const mimeType = MediaRecorder.isTypeSupported("audio/webm") ? "audio/webm" : "";
mediaRecorder = new MediaRecorder(audioStream, mimeType ? { mimeType } : undefined);
mediaRecorder.ondataavailable = event => audioChunks.push(event.data);
mediaRecorder.onstop = async () => {
statusMessage.textContent = "Processing audio data...";
recordButton.innerHTML = '<i class="bi bi-mic-fill icon-spacing"></i> Start Listening';
recordButton.classList.remove("recording");
recordButton.classList.remove("btn-danger");
recordButton.classList.add("btn-record");
try {
const audioBlob = new Blob(audioChunks, { type: "audio/webm" });
const wavBlob = await convertWebMToWav(audioBlob);
// Package the WAV recording and send it to the backend for decoding
const formData = new FormData();
formData.append("file", wavBlob, "recording.wav");
const response = await fetch("/chat/", {
method: "POST",
body: formData
});
if (response.ok) {
// Display response headers that we know are sent back from the endpoint
const userMessage = response.headers.get("X-User-Message") || "No user message";
const llmResponse = response.headers.get("X-LLM-Response") || "No response";
// Update the header display
userMessageHeader.innerHTML = `X-User-Message: <span class="text-primary">${userMessage}</span>`;
llmResponseHeader.innerHTML = `X-LLM-Response: <span class="text-success">${llmResponse}</span>`;
// Update received data
receivedData.textContent = userMessage;
// Get audio blob from response and play it
const audioData = await response.blob();
document.getElementById("audioPlayer").src = URL.createObjectURL(audioData);
statusMessage.textContent = "Data decoded successfully!";
} else {
statusMessage.textContent = "Error processing audio data. Please try again.";
}
} catch (error) {
console.error("Error:", error);
statusMessage.textContent = "Error processing audio data. Please try again.";
}
// Clean up the audio tracks after processing is complete
if (audioStream) {
audioStream.getTracks().forEach(track => track.stop());
}
};
audioChunks = [];
mediaRecorder.start();
recordButton.innerHTML = '<i class="bi bi-stop-fill icon-spacing"></i> Listening...';
recordButton.classList.add("recording");
recordButton.classList.remove("btn-record");
recordButton.classList.add("btn-danger");
statusMessage.textContent = "Listening for data transmission...";
// Auto-stop after 5 seconds
recordingTimeout = setTimeout(() => {
if (mediaRecorder && mediaRecorder.state === "recording") {
mediaRecorder.stop();
}
}, 5000);
} catch (error) {
console.error("Error accessing microphone:", error);
statusMessage.textContent = "Could not access microphone. Please check permissions.";
}
} else if (mediaRecorder.state === "recording") {
// Stop recording if already recording
clearTimeout(recordingTimeout);
mediaRecorder.stop();
}
});
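// Transmit path: POST the typed message as JSON to /tts/, which is expected to return an
// audio blob with the text encoded as sound; the blob is loaded into the player and
// auto-played so a nearby listening device can pick it up.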
generateButton.addEventListener("click", async () => {
const text = messageInput.value.trim();
if (text) {
statusMessage.textContent = "Encoding data to sound...";
try {
const response = await fetch("/tts/", {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({ text })
});
if (response.ok) {
const audioData = await response.blob();
document.getElementById("audioPlayer").src = URL.createObjectURL(audioData);
statusMessage.textContent = "Data encoded as sound. Ready to transmit!";
// Attempt auto-play; browsers may block this until the user interacts with the page
document.getElementById("audioPlayer").play().catch(() => {
statusMessage.textContent = "Data encoded. Press play to transmit.";
});
} else {
statusMessage.textContent = "Error encoding data. Please try again.";
}
} catch (error) {
console.error("Error:", error);
statusMessage.textContent = "Error encoding data. Please try again.";
}
} else {
statusMessage.textContent = "Please enter a message to transmit.";
}
});
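// MediaRecorder produces compressed WebM/Opus, but the /chat/ endpoint is assumed to expect
// plain WAV, so the capture is decoded with the Web Audio API and re-encoded client-side.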
async function convertWebMToWav(blob) {
return new Promise((resolve, reject) => {
try {
const reader = new FileReader();
reader.onload = function () {
const audioContext = new AudioContext();
audioContext.decodeAudioData(reader.result)
.then(buffer => {
const wavBuffer = audioBufferToWav(buffer);
resolve(new Blob([wavBuffer], { type: "audio/wav" }));
})
.catch(error => {
console.error("Error decoding audio data:", error);
reject(error);
});
};
reader.onerror = () => reject(reader.error);
reader.readAsArrayBuffer(blob);
} catch (error) {
console.error("Error in convertWebMToWav:", error);
reject(error);
}
});
}
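// Minimal WAV (RIFF) encoder: writes a 44-byte header (RIFF/WAVE, "fmt " and "data" chunks,
// all fields little-endian) followed by interleaved 16-bit PCM samples.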
function audioBufferToWav(buffer) {
let numOfChan = buffer.numberOfChannels,
length = buffer.length * numOfChan * 2 + 44,
bufferArray = new ArrayBuffer(length),
view = new DataView(bufferArray),
channels = [],
sampleRate = buffer.sampleRate,
offset = 0,
pos = 0;
setUint32(0x46464952); // "RIFF" chunk ID
setUint32(length - 8); // overall file size minus the first 8 bytes
setUint32(0x45564157); // "WAVE" format
setUint32(0x20746d66); // "fmt " sub-chunk
setUint32(16); // fmt chunk size = 16 for PCM
setUint16(1); // audio format 1 = PCM (uncompressed)
setUint16(numOfChan); // channel count
setUint32(sampleRate); // sample rate
setUint32(sampleRate * 2 * numOfChan); // byte rate = sampleRate * channels * 2 bytes
setUint16(numOfChan * 2); // block align = channels * bytes per sample
setUint16(16); // bits per sample
setUint32(0x61746164); // "data" sub-chunk
setUint32(length - pos - 4); // data chunk size
for (let i = 0; i < buffer.numberOfChannels; i++)
channels.push(buffer.getChannelData(i));
// Interleave the channels and convert float samples in [-1, 1] to 16-bit signed PCM
while (pos < length) {
for (let i = 0; i < numOfChan; i++) {
let sample = Math.max(-1, Math.min(1, channels[i][offset]));
sample = Math.round(sample < 0 ? sample * 0x8000 : sample * 0x7FFF);
setUint16(sample); // negative values wrap to their two's-complement uint16 representation
}
offset++;
}
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
return bufferArray;
}
</script>
</body>
</html>