yasserrmd committed on
Commit
4fb332f
·
verified ·
1 Parent(s): 976e0aa

Update static/index.html

Browse files
Files changed (1) hide show
  1. static/index.html +173 -150
static/index.html CHANGED
@@ -1,3 +1,4 @@
 
1
  <!DOCTYPE html>
2
  <html lang="en">
3
  <head>
@@ -143,176 +144,198 @@
143
  <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"></script>
144
  <script>
145
  let mediaRecorder;
146
- let audioChunks = [];
147
- let audioStream;
148
- const recordButton = document.getElementById("recordButton");
149
- const statusMessage = document.getElementById("statusMessage");
150
- const userMessageHeader = document.getElementById("userMessageHeader");
151
- const llmResponseHeader = document.getElementById("llmResponseHeader");
152
- const receivedData = document.getElementById("receivedData");
153
- const audioPlayer = document.getElementById("audioPlayer");
154
- let recordingTimeout;
155
-
156
- recordButton.addEventListener("click", async () => {
157
- if (!mediaRecorder || mediaRecorder.state === "inactive") {
158
- try {
159
- if (audioStream) {
160
- audioStream.getTracks().forEach(track => track.stop());
161
- }
162
-
163
- audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
164
- mediaRecorder = new MediaRecorder(audioStream, { mimeType: "audio/webm" });
165
- audioChunks = []; // Reset chunks
166
-
167
- mediaRecorder.ondataavailable = event => {
168
- if (event.data.size > 0) {
169
- audioChunks.push(event.data);
170
- }
171
- };
172
-
173
- mediaRecorder.onstop = async () => {
174
- if (audioChunks.length === 0) {
175
- statusMessage.textContent = "No audio recorded. Try again.";
176
- return;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
  }
178
-
179
- statusMessage.textContent = "Processing audio data...";
180
- recordButton.innerHTML = '<i class="bi bi-mic-fill icon-spacing"></i> Start Listening';
181
- recordButton.classList.remove("recording", "btn-danger");
182
- recordButton.classList.add("btn-record");
183
-
 
 
 
 
 
184
  try {
185
- const audioBlob = new Blob(audioChunks, { type: "audio/webm" });
186
- const wavBlob = await convertWebMToWav(audioBlob);
187
- const wavFile = new File([wavBlob], "recording.wav", { type: "audio/wav" });
188
-
189
- const formData = new FormData();
190
- formData.append("file", wavFile);
191
-
192
- const response = await fetch("/chat/", {
193
  method: "POST",
194
- body: formData
 
 
 
195
  });
196
-
197
  if (response.ok) {
198
- const userMessage = response.headers.get("X-User-Message") || "No user message";
199
- const llmResponse = response.headers.get("X-LLM-Response") || "No response";
200
-
201
- userMessageHeader.innerHTML = `X-User-Message: <span class="text-primary">${userMessage}</span>`;
202
- llmResponseHeader.innerHTML = `X-LLM-Response: <span class="text-success">${llmResponse}</span>`;
203
-
204
- receivedData.textContent = userMessage;
205
  const audioData = await response.blob();
206
- audioPlayer.src = URL.createObjectURL(audioData);
207
- statusMessage.textContent = "Data decoded successfully!";
 
 
208
  } else {
209
- statusMessage.textContent = "Error processing audio data. Please try again.";
210
  }
211
  } catch (error) {
212
  console.error("Error:", error);
213
- statusMessage.textContent = "Error processing audio data. Please try again.";
214
  }
 
 
 
 
215
 
216
- if (audioStream) {
217
- audioStream.getTracks().forEach(track => track.stop());
218
- }
219
- };
220
-
221
- mediaRecorder.start();
222
- recordButton.innerHTML = '<i class="bi bi-stop-fill icon-spacing"></i> Listening...';
223
- recordButton.classList.add("recording", "btn-danger");
224
- recordButton.classList.remove("btn-record");
225
- statusMessage.textContent = "Listening for data transmission...";
226
-
227
- recordingTimeout = setTimeout(() => {
228
- if (mediaRecorder.state === "recording") {
229
- mediaRecorder.stop();
 
 
 
 
 
 
230
  }
231
- }, 5000);
232
- } catch (error) {
233
- console.error("Error accessing microphone:", error);
234
- statusMessage.textContent = "Could not access microphone. Please check permissions.";
235
- }
236
- } else if (mediaRecorder.state === "recording") {
237
- clearTimeout(recordingTimeout);
238
- mediaRecorder.stop();
239
- }
240
- });
241
-
242
- async function convertWebMToWav(blob) {
243
- return new Promise((resolve, reject) => {
244
- try {
245
- const reader = new FileReader();
246
- reader.onload = function () {
247
- const audioContext = new AudioContext();
248
- audioContext.decodeAudioData(reader.result)
249
- .then(buffer => {
250
- const wavBuffer = audioBufferToWav(buffer);
251
- resolve(new Blob([wavBuffer], { type: "audio/wav" }));
252
- })
253
- .catch(error => {
254
- console.error("Error decoding audio data:", error);
255
- reject(error);
256
- });
257
- };
258
- reader.readAsArrayBuffer(blob);
259
- } catch (error) {
260
- console.error("Error in convertWebMToWav:", error);
261
- reject(error);
262
  }
263
- });
264
- }
265
-
266
- function audioBufferToWav(buffer) {
267
- let numOfChan = buffer.numberOfChannels,
268
- length = buffer.length * numOfChan * 2 + 44,
269
- bufferArray = new ArrayBuffer(length),
270
- view = new DataView(bufferArray),
271
- channels = [],
272
- sampleRate = buffer.sampleRate,
273
- offset = 0,
274
- pos = 0;
275
 
276
- setUint32(0x46464952); // "RIFF"
277
- setUint32(length - 8);
278
- setUint32(0x45564157); // "WAVE"
279
- setUint32(0x20746d66); // "fmt " chunk
280
- setUint32(16);
281
- setUint16(1);
282
- setUint16(numOfChan);
283
- setUint32(sampleRate);
284
- setUint32(sampleRate * 2 * numOfChan);
285
- setUint16(numOfChan * 2);
286
- setUint16(16);
287
- setUint32(0x61746164);
288
- setUint32(length - pos - 4);
289
 
290
- for (let i = 0; i < buffer.numberOfChannels; i++) {
291
- channels.push(buffer.getChannelData(i));
292
- }
 
 
 
 
 
 
 
 
 
 
293
 
294
- while (pos < length) {
295
- for (let i = 0; i < numOfChan; i++) {
296
- let sample = Math.max(-1, Math.min(1, channels[i][offset]));
297
- sample = sample < 0 ? sample * 0x8000 : sample * 0x7FFF;
298
- setUint16(sample);
299
- }
300
- offset++;
301
- }
302
 
303
- function setUint16(data) {
304
- view.setUint16(pos, data, true);
305
- pos += 2;
306
- }
 
 
 
 
307
 
308
- function setUint32(data) {
309
- view.setUint32(pos, data, true);
310
- pos += 4;
311
- }
312
 
313
- return bufferArray;
314
- }
 
 
315
 
 
 
316
  </script>
317
  </body>
318
- </html>
 
1
+
2
  <!DOCTYPE html>
3
  <html lang="en">
4
  <head>
 
144
  <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"></script>
145
  <script>
146
  let mediaRecorder;
147
+ let audioChunks = [];
148
+ const recordButton = document.getElementById("recordButton");
149
+ const generateButton = document.getElementById("generateButton");
150
+ const statusMessage = document.getElementById("statusMessage");
151
+ const messageInput = document.getElementById("messageInput");
152
+ const userMessageHeader = document.getElementById("userMessageHeader");
153
+ const llmResponseHeader = document.getElementById("llmResponseHeader");
154
+ const receivedData = document.getElementById("receivedData");
155
+ let recordingTimeout;
156
+
157
+ recordButton.addEventListener("click", async () => {
158
+ if (!mediaRecorder || mediaRecorder.state === "inactive") {
159
+ try {
160
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
161
+ mediaRecorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
162
+
163
+ mediaRecorder.ondataavailable = event => audioChunks.push(event.data);
164
+
165
+ mediaRecorder.onstop = async () => {
166
+ statusMessage.textContent = "Processing audio data...";
167
+ recordButton.innerHTML = '<i class="bi bi-mic-fill icon-spacing"></i> Start Listening';
168
+ recordButton.classList.remove("recording");
169
+ recordButton.classList.remove("btn-danger");
170
+ recordButton.classList.add("btn-record");
171
+
172
+ try {
173
+ const audioBlob = new Blob(audioChunks, { type: "audio/webm" });
174
+ const wavBlob = await convertWebMToWav(audioBlob);
175
+
176
+ // This is the original code from your first implementation
177
+ const formData = new FormData();
178
+ formData.append("file", wavBlob, "recording.wav");
179
+
180
+ const response = await fetch("/chat/", {
181
+ method: "POST",
182
+ body: formData
183
+ });
184
+
185
+ if (response.ok) {
186
+ // Display response headers that we know are sent back from the endpoint
187
+ const userMessage = response.headers.get("X-User-Message") || "No user message";
188
+ const llmResponse = response.headers.get("X-LLM-Response") || "No response";
189
+
190
+ // Update the header display
191
+ userMessageHeader.innerHTML = `X-User-Message: <span class="text-primary">${userMessage}</span>`;
192
+ llmResponseHeader.innerHTML = `X-LLM-Response: <span class="text-success">${llmResponse}</span>`;
193
+
194
+ // Update received data
195
+ receivedData.textContent = userMessage;
196
+
197
+ // Get audio blob from response and play it
198
+ const audioData = await response.blob();
199
+ document.getElementById("audioPlayer").src = URL.createObjectURL(audioData);
200
+ statusMessage.textContent = "Data decoded successfully!";
201
+ } else {
202
+ statusMessage.textContent = "Error processing audio data. Please try again.";
203
+ }
204
+ } catch (error) {
205
+ console.error("Error:", error);
206
+ statusMessage.textContent = "Error processing audio data. Please try again.";
207
+ }
208
+ };
209
+
210
+ audioChunks = [];
211
+ mediaRecorder.start();
212
+
213
+ recordButton.innerHTML = '<i class="bi bi-stop-fill icon-spacing"></i> Listening...';
214
+ recordButton.classList.add("recording");
215
+ recordButton.classList.remove("btn-record");
216
+ recordButton.classList.add("btn-danger");
217
+ statusMessage.textContent = "Listening for data transmission...";
218
+
219
+ // Auto-stop after 5 seconds
220
+ recordingTimeout = setTimeout(() => {
221
+ if (mediaRecorder && mediaRecorder.state === "recording") {
222
+ mediaRecorder.stop();
223
+ }
224
+ }, 5000);
225
+ } catch (error) {
226
+ console.error("Error accessing microphone:", error);
227
+ statusMessage.textContent = "Could not access microphone. Please check permissions.";
228
  }
229
+ } else if (mediaRecorder.state === "recording") {
230
+ // Stop recording if already recording
231
+ clearTimeout(recordingTimeout);
232
+ mediaRecorder.stop();
233
+ }
234
+ });
235
+
236
+ generateButton.addEventListener("click", async () => {
237
+ const text = messageInput.value.trim();
238
+ if (text) {
239
+ statusMessage.textContent = "Encoding data to sound...";
240
  try {
241
+ const response = await fetch("/tts/", {
 
 
 
 
 
 
 
242
  method: "POST",
243
+ headers: {
244
+ "Content-Type": "application/json"
245
+ },
246
+ body: JSON.stringify({ text })
247
  });
248
+
249
  if (response.ok) {
 
 
 
 
 
 
 
250
  const audioData = await response.blob();
251
+ document.getElementById("audioPlayer").src = URL.createObjectURL(audioData);
252
+ statusMessage.textContent = "Data encoded as sound. Ready to transmit!";
253
+ // Auto-play option
254
+ document.getElementById("audioPlayer").play();
255
  } else {
256
+ statusMessage.textContent = "Error encoding data. Please try again.";
257
  }
258
  } catch (error) {
259
  console.error("Error:", error);
260
+ statusMessage.textContent = "Error encoding data. Please try again.";
261
  }
262
+ } else {
263
+ statusMessage.textContent = "Please enter a message to transmit.";
264
+ }
265
+ });
266
 
267
+ async function convertWebMToWav(blob) {
268
+ return new Promise((resolve, reject) => {
269
+ try {
270
+ const reader = new FileReader();
271
+ reader.onload = function () {
272
+ const audioContext = new AudioContext();
273
+ audioContext.decodeAudioData(reader.result)
274
+ .then(buffer => {
275
+ const wavBuffer = audioBufferToWav(buffer);
276
+ resolve(new Blob([wavBuffer], { type: "audio/wav" }));
277
+ })
278
+ .catch(error => {
279
+ console.error("Error decoding audio data:", error);
280
+ reject(error);
281
+ });
282
+ };
283
+ reader.readAsArrayBuffer(blob);
284
+ } catch (error) {
285
+ console.error("Error in convertWebMToWav:", error);
286
+ reject(error);
287
  }
288
+ });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
289
  }
 
 
 
 
 
 
 
 
 
 
 
 
290
 
291
+ function audioBufferToWav(buffer) {
292
+ let numOfChan = buffer.numberOfChannels,
293
+ length = buffer.length * numOfChan * 2 + 44,
294
+ bufferArray = new ArrayBuffer(length),
295
+ view = new DataView(bufferArray),
296
+ channels = [],
297
+ sampleRate = buffer.sampleRate,
298
+ offset = 0,
299
+ pos = 0;
 
 
 
 
300
 
301
+ setUint32(0x46464952); // "RIFF"
302
+ setUint32(length - 8);
303
+ setUint32(0x45564157); // "WAVE"
304
+ setUint32(0x20746d66); // "fmt " chunk
305
+ setUint32(16); // length = 16
306
+ setUint16(1); // PCM (uncompressed)
307
+ setUint16(numOfChan);
308
+ setUint32(sampleRate);
309
+ setUint32(sampleRate * 2 * numOfChan);
310
+ setUint16(numOfChan * 2);
311
+ setUint16(16); // bits per sample
312
+ setUint32(0x61746164); // "data" chunk
313
+ setUint32(length - pos - 4);
314
 
315
+ for (let i = 0; i < buffer.numberOfChannels; i++)
316
+ channels.push(buffer.getChannelData(i));
 
 
 
 
 
 
317
 
318
+ while (pos < length) {
319
+ for (let i = 0; i < numOfChan; i++) {
320
+ let sample = Math.max(-1, Math.min(1, channels[i][offset]));
321
+ sample = sample < 0 ? sample * 0x8000 : sample * 0x7FFF;
322
+ setUint16(sample);
323
+ }
324
+ offset++;
325
+ }
326
 
327
+ function setUint16(data) {
328
+ view.setUint16(pos, data, true);
329
+ pos += 2;
330
+ }
331
 
332
+ function setUint32(data) {
333
+ view.setUint32(pos, data, true);
334
+ pos += 4;
335
+ }
336
 
337
+ return bufferArray;
338
+ }
339
  </script>
340
  </body>
341
+ </html>