Update flare-ui/src/app/services/audio-stream.service.ts
flare-ui/src/app/services/audio-stream.service.ts
CHANGED
@@ -1,545 +1,548 @@
(Previous version: 545 lines removed and replaced in full by the version below.)
// audio-stream.service.ts update
// Version with Linear16 format support added

import { Injectable, OnDestroy } from '@angular/core';
import { Subject, Observable, throwError } from 'rxjs';

export interface AudioChunk {
  data: string; // Base64 encoded audio
  timestamp: number;
}

export interface AudioStreamError {
  type: 'permission' | 'device' | 'browser' | 'unknown';
  message: string;
  originalError?: any;
}

@Injectable({
  providedIn: 'root'
})
export class AudioStreamService implements OnDestroy {
  private mediaRecorder: MediaRecorder | null = null;
  private audioStream: MediaStream | null = null;
  private audioChunkSubject = new Subject<AudioChunk>();
  private recordingStateSubject = new Subject<boolean>();
  private errorSubject = new Subject<AudioStreamError>();
  private volumeLevelSubject = new Subject<number>();

  public audioChunk$ = this.audioChunkSubject.asObservable();
  public recordingState$ = this.recordingStateSubject.asObservable();
  public error$ = this.errorSubject.asObservable();
  public volumeLevel$ = this.volumeLevelSubject.asObservable();

  // Audio analysis
  private audioContext: AudioContext | null = null;
  private analyser: AnalyserNode | null = null;
  private volumeInterval: any;

  // Additions for Linear16 conversion
  private scriptProcessor: ScriptProcessorNode | null = null;
  private source: MediaStreamAudioSourceNode | null = null;
  private useLinear16 = true; // Flag that enables Linear16

  // Audio constraints
  private constraints = {
    audio: {
      channelCount: 1,
      sampleRate: 16000,
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true
    }
  };

  ngOnDestroy(): void {
    this.cleanup();
  }

  static checkBrowserSupport(): boolean {
    return !!(
      navigator.mediaDevices &&
      typeof navigator.mediaDevices.getUserMedia === 'function' &&
      (window.MediaRecorder || window.AudioContext)
    );
  }

  async startRecording(): Promise<void> {
    try {
      console.log('🎤 [AudioStream] startRecording called', {
        isAlreadyRecording: this.isRecording(),
        useLinear16: this.useLinear16,
        timestamp: new Date().toISOString()
      });

      if ((this.mediaRecorder && this.mediaRecorder.state !== 'inactive') || this.scriptProcessor) {
        console.warn('Recording already in progress');
        return;
      }

      // Check browser support
      if (!AudioStreamService.checkBrowserSupport()) {
        const error = this.createError('browser', 'Browser does not support audio recording');
        this.errorSubject.next(error);
        throw error;
      }

      try {
        // Get audio stream
        this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
        console.log('✅ [AudioStream] Got media stream');

        if (this.useLinear16) {
          // Use the Web Audio API for the Linear16 format
          await this.startLinear16Recording();
        } else {
          // Use the standard MediaRecorder (WebM-Opus)
          await this.startMediaRecorderRecording();
        }

        this.recordingStateSubject.next(true);
        console.log('✅ [AudioStream] Recording started successfully');

        // Start volume monitoring
        this.startVolumeMonitoring();

      } catch (error: any) {
        console.error('❌ [AudioStream] getUserMedia error:', error);

        let audioError: AudioStreamError;

        if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
          audioError = this.createError('permission', 'Microphone permission denied');
        } else if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
          audioError = this.createError('device', 'No microphone found');
        } else {
          audioError = this.createError('unknown', `Failed to access microphone: ${error.message}`, error);
        }

        this.errorSubject.next(audioError);
        throw audioError;
      }
    } catch (error) {
      console.error('❌ [AudioStream] startRecording error:', error);
      this.cleanup();
      throw error;
    }
  }

  private async startLinear16Recording(): Promise<void> {
    console.log('🎵 Starting Linear16 recording with Web Audio API');

    // Create audio context with specific sample rate
    this.audioContext = new AudioContext({ sampleRate: 16000 });

    // Create source from stream
    this.source = this.audioContext.createMediaStreamSource(this.audioStream!);

    // Create script processor for raw PCM access
    this.scriptProcessor = this.audioContext.createScriptProcessor(4096, 1, 1);

    // Chunk counter for debugging
    let chunkCounter = 0;

    this.scriptProcessor.onaudioprocess = (audioEvent) => {
      // Get PCM data from input buffer
      const inputData = audioEvent.inputBuffer.getChannelData(0);

      // Debug: detailed log for the first 5 chunks
      if (chunkCounter < 5) {
        const maxAmplitude = Math.max(...inputData.map(Math.abs));
        const avgAmplitude = inputData.reduce((sum, val) => sum + Math.abs(val), 0) / inputData.length;

        console.log(`🎤 Audio Debug Chunk #${chunkCounter}:`, {
          bufferLength: inputData.length,
          maxAmplitude: maxAmplitude.toFixed(6),
          avgAmplitude: avgAmplitude.toFixed(6),
          firstSamples: Array.from(inputData.slice(0, 10)).map(v => v.toFixed(4)),
          silent: maxAmplitude < 0.001
        });
      }

      // Convert Float32Array to Int16Array (Linear16)
      const pcmData = this.float32ToInt16(inputData);

      // Debug: verify the PCM conversion
      if (chunkCounter < 5) {
        const pcmArray = Array.from(pcmData.slice(0, 10));
        console.log(`🔄 PCM Conversion #${chunkCounter}:`, {
          firstPCMSamples: pcmArray,
          maxPCM: Math.max(...Array.from(pcmData).map(Math.abs))
        });
      }

      // Convert to base64
      const base64Data = this.arrayBufferToBase64(pcmData.buffer);

      // Debug: verify the Base64 output
      if (chunkCounter < 5) {
        console.log(`📦 Base64 Output #${chunkCounter}:`, {
          base64Length: base64Data.length,
          base64Preview: base64Data.substring(0, 50) + '...'
        });
      }

      chunkCounter++;

      // Send chunk
      this.audioChunkSubject.next({
        data: base64Data,
        timestamp: Date.now()
      });
    };

    // Connect nodes
    this.source.connect(this.scriptProcessor);
    this.scriptProcessor.connect(this.audioContext.destination);

    // Test: check the microphone level
    setTimeout(() => {
      if (this.source && this.audioContext) {
        console.log('🎙️ Audio Context State:', this.audioContext.state);
        console.log('🎙️ Sample Rate:', this.audioContext.sampleRate);
      }
    }, 1000);

    console.log('✅ Linear16 recording setup complete');
  }
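  // Note: ScriptProcessorNode is deprecated in the Web Audio API and its callback runs
  // on the main thread; an AudioWorkletNode-based processor is the recommended
  // long-term replacement for raw PCM capture like this.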

  private async startMediaRecorderRecording(): Promise<void> {
    // Original MediaRecorder implementation
    const mimeType = this.getPreferredMimeType();
    const options: MediaRecorderOptions = {};
    if (mimeType) {
      options.mimeType = mimeType;
    }

    this.mediaRecorder = new MediaRecorder(this.audioStream!, options);
    console.log(`✅ [AudioStream] MediaRecorder created with MIME type: ${mimeType || 'default'}`);

    this.setupMediaRecorderHandlers();
    this.mediaRecorder.start(100);
  }

  private float32ToInt16(buffer: Float32Array): Int16Array {
    const l = buffer.length;
    const result = new Int16Array(l);

    // ✅ Boost gain
    const gain = 2.0; // Amplify the signal 2x

    for (let i = 0; i < l; i++) {
      // Convert float32 [-1, 1] to int16 [-32768, 32767]
      let s = Math.max(-1, Math.min(1, buffer[i] * gain));
      result[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
    }

    return result;
  }
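  // Note: the sample is clamped to [-1, 1] after the fixed 2.0 gain above is applied,
  // so any input above half of full scale will clip; lower the gain constant if the
  // captured audio sounds distorted.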

  private arrayBufferToBase64(buffer: ArrayBuffer): string {
    const bytes = new Uint8Array(buffer);
    let binary = '';

    for (let i = 0; i < bytes.byteLength; i++) {
      binary += String.fromCharCode(bytes[i]);
    }

    return btoa(binary);
  }

  stopRecording(): void {
    try {
      console.log('🛑 [AudioStream] stopRecording called', {
        hasMediaRecorder: !!this.mediaRecorder,
        hasScriptProcessor: !!this.scriptProcessor,
        state: this.mediaRecorder?.state,
        timestamp: new Date().toISOString()
      });

      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
        this.mediaRecorder.stop();
      }

      this.cleanup();
      this.recordingStateSubject.next(false);
      console.log('🛑 [AudioStream] Audio recording stopped successfully');
    } catch (error) {
      console.error('❌ [AudioStream] Error stopping recording:', error);
      this.cleanup();
    }
  }

  private setupMediaRecorderHandlers(): void {
    if (!this.mediaRecorder) return;

    // Handle data available
    this.mediaRecorder.ondataavailable = async (event) => {
      try {
        if (event.data && event.data.size > 0) {
          const base64Data = await this.blobToBase64(event.data);
          this.audioChunkSubject.next({
            data: base64Data,
            timestamp: Date.now()
          });
        }
      } catch (error) {
        console.error('Error processing audio chunk:', error);
        this.errorSubject.next(this.createError('unknown', 'Failed to process audio chunk', error));
      }
    };

    // Handle recording stop
    this.mediaRecorder.onstop = () => {
      console.log('MediaRecorder stopped');
      this.cleanup();
    };

    // Handle errors
    this.mediaRecorder.onerror = (event: any) => {
      console.error('MediaRecorder error:', event);
      const error = this.createError('unknown', `Recording error: ${event.error?.message || 'Unknown error'}`, event.error);
      this.errorSubject.next(error);
      this.stopRecording();
    };
  }

  private getPreferredMimeType(): string {
    const types = [
      'audio/webm;codecs=opus',
      'audio/webm',
      'audio/ogg;codecs=opus',
      'audio/ogg',
      'audio/mp4'
    ];

    for (const type of types) {
      if (MediaRecorder.isTypeSupported(type)) {
        console.log(`Using MIME type: ${type}`);
        return type;
      }
    }

    // Return empty to use browser default
    console.warn('No supported MIME types found, using browser default');
    return '';
  }

  private async blobToBase64(blob: Blob): Promise<string> {
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onloadend = () => {
        if (reader.result && typeof reader.result === 'string') {
          // Remove data URL prefix
          const base64 = reader.result.split(',')[1];
          resolve(base64);
        } else {
          reject(new Error('Failed to convert blob to base64'));
        }
      };
      reader.onerror = () => {
        reject(new Error('FileReader error'));
      };
      reader.readAsDataURL(blob);
    });
  }

  // Volume level monitoring
  private startVolumeMonitoring(): void {
    if (!this.audioStream) return;

    try {
      // If an AudioContext already exists for Linear16, reuse it
      if (!this.audioContext) {
        this.audioContext = new AudioContext();
        this.source = this.audioContext.createMediaStreamSource(this.audioStream);
      }

      this.analyser = this.audioContext.createAnalyser();

      if (this.source) {
        this.source.connect(this.analyser);
      }

      this.analyser.fftSize = 256;

      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);

      // Monitor volume every 100ms
      this.volumeInterval = setInterval(() => {
        if (this.analyser) {
          this.analyser.getByteFrequencyData(dataArray);

          // Calculate average volume
          const sum = dataArray.reduce((acc, val) => acc + val, 0);
          const average = sum / dataArray.length;
          const normalizedVolume = average / 255; // Normalize to 0-1

          this.volumeLevelSubject.next(normalizedVolume);
        }
      }, 100);
    } catch (error) {
      console.warn('Failed to start volume monitoring:', error);
    }
  }

  private stopVolumeMonitoring(): void {
    if (this.volumeInterval) {
      clearInterval(this.volumeInterval);
      this.volumeInterval = null;
    }

    // Don't close the AudioContext here if Linear16 is still using it
    if (this.audioContext && !this.useLinear16) {
      try {
        this.audioContext.close();
      } catch (error) {
        console.warn('Error closing audio context:', error);
      }
      this.audioContext = null;
      this.analyser = null;
    }
  }

  async getVolumeLevel(): Promise<number> {
    if (!this.audioStream || !this.analyser) return 0;

    try {
      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
      this.analyser.getByteFrequencyData(dataArray);

      // Calculate average volume
      const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;

      return average / 255; // Normalize to 0-1
    } catch (error) {
      console.error('Error getting volume level:', error);
      return 0;
    }
  }

  // Check microphone permissions
  async checkMicrophonePermission(): Promise<PermissionState> {
    try {
      // First check if Permissions API is available
      if (!navigator.permissions || !navigator.permissions.query) {
        console.warn('Permissions API not supported');
        // Try to check by attempting getUserMedia with video disabled
        try {
          const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
          stream.getTracks().forEach(track => track.stop());
          return 'granted';
        } catch (error: any) {
          if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
            return 'denied';
          }
          return 'prompt';
        }
      }

      // Use Permissions API
      const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
      return result.state;
    } catch (error) {
      console.warn('Error checking microphone permission:', error);
      // Assume prompt state if we can't determine
      return 'prompt';
    }
  }

  private cleanup(): void {
    try {
      // Stop media recorder
      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
        this.mediaRecorder.stop();
      }
      this.mediaRecorder = null;

      // Stop script processor for Linear16
      if (this.scriptProcessor) {
        this.scriptProcessor.disconnect();
        this.scriptProcessor = null;
      }

      if (this.source && !this.analyser) {
        this.source.disconnect();
        this.source = null;
      }

      // Stop all tracks
      if (this.audioStream) {
        this.audioStream.getTracks().forEach(track => {
          track.stop();
        });
        this.audioStream = null;
      }

      // Stop volume monitoring
      this.stopVolumeMonitoring();

      // Close audio context if using Linear16
      if (this.audioContext && this.useLinear16) {
        try {
          this.audioContext.close();
        } catch (error) {
          console.warn('Error closing audio context:', error);
        }
        this.audioContext = null;
      }

    } catch (error) {
      console.error('Error during cleanup:', error);
    }
  }

  private createError(type: AudioStreamError['type'], message: string, originalError?: any): AudioStreamError {
    return {
      type,
      message,
      originalError
    };
  }

  // Get recording state
  isRecording(): boolean {
    return (this.mediaRecorder !== null && this.mediaRecorder.state === 'recording') ||
           (this.scriptProcessor !== null);
  }

  // Get available audio devices
  async getAudioDevices(): Promise<MediaDeviceInfo[]> {
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(device => device.kind === 'audioinput');
    } catch (error) {
      console.error('Error enumerating devices:', error);
      return [];
    }
  }

  // Switch audio device
  async switchAudioDevice(deviceId: string): Promise<void> {
    if (this.isRecording()) {
      // Stop current recording
      this.stopRecording();

      // Update constraints with new device
      this.constraints.audio = {
        ...this.constraints.audio,
        deviceId: { exact: deviceId }
      } as any;

      // Restart recording with new device
      await this.startRecording();
    } else {
      // Just update constraints for next recording
      this.constraints.audio = {
        ...this.constraints.audio,
        deviceId: { exact: deviceId }
      } as any;
    }
  }

  // Toggle Linear16 format usage on/off
  setUseLinear16(use: boolean): void {
    this.useLinear16 = use;
    console.log(`Audio format switched to: ${use ? 'Linear16' : 'WebM-Opus'}`);
  }
}
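For reference, a minimal sketch of how an Angular component might consume this service. The component name, template, and the placeholder chunk handler are illustrative assumptions, not part of this commit:

import { Component, OnDestroy } from '@angular/core';
import { Subscription } from 'rxjs';
import { AudioStreamService, AudioChunk } from './audio-stream.service';

@Component({
  selector: 'app-mic-demo',
  template: `<button (click)="toggle()">{{ recording ? 'Stop' : 'Start' }}</button>`
})
export class MicDemoComponent implements OnDestroy {
  recording = false;
  private subs = new Subscription();

  constructor(private audio: AudioStreamService) {
    // Each chunk carries base64-encoded audio: Linear16 PCM (16 kHz, mono) by default
    this.subs.add(this.audio.audioChunk$.subscribe((chunk: AudioChunk) => {
      // e.g. forward chunk.data to a WebSocket / STT backend here
    }));
    this.subs.add(this.audio.recordingState$.subscribe(state => this.recording = state));
    this.subs.add(this.audio.error$.subscribe(err => console.error('Audio error:', err.message)));
  }

  async toggle(): Promise<void> {
    if (this.recording) {
      this.audio.stopRecording();
    } else {
      await this.audio.startRecording();
    }
  }

  ngOnDestroy(): void {
    this.subs.unsubscribe();
    this.audio.stopRecording();
  }
}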