ciyidogan committed
Commit 56ea350 · verified · 1 Parent(s): 5e664a7

Update flare-ui/src/app/services/audio-stream.service.ts

flare-ui/src/app/services/audio-stream.service.ts CHANGED
@@ -1,14 +1,20 @@
-// audio-stream.service.ts (NEW FILE)
+// audio-stream.service.ts
 // Path: /flare-ui/src/app/services/audio-stream.service.ts
 
 import { Injectable, OnDestroy } from '@angular/core';
-import { Subject, Observable } from 'rxjs';
+import { Subject, Observable, throwError } from 'rxjs';
 
 export interface AudioChunk {
   data: string; // Base64 encoded audio
   timestamp: number;
 }
 
+export interface AudioStreamError {
+  type: 'permission' | 'device' | 'browser' | 'unknown';
+  message: string;
+  originalError?: any;
+}
+
 @Injectable({
   providedIn: 'root'
 })
@@ -17,9 +23,18 @@ export class AudioStreamService implements OnDestroy {
   private audioStream: MediaStream | null = null;
   private audioChunkSubject = new Subject<AudioChunk>();
   private recordingStateSubject = new Subject<boolean>();
+  private errorSubject = new Subject<AudioStreamError>();
+  private volumeLevelSubject = new Subject<number>();
 
   public audioChunk$ = this.audioChunkSubject.asObservable();
   public recordingState$ = this.recordingStateSubject.asObservable();
+  public error$ = this.errorSubject.asObservable();
+  public volumeLevel$ = this.volumeLevelSubject.asObservable();
+
+  // Audio analysis
+  private audioContext: AudioContext | null = null;
+  private analyser: AnalyserNode | null = null;
+  private volumeInterval: any;
 
   // Audio constraints
   private constraints = {
@@ -33,41 +48,57 @@ export class AudioStreamService implements OnDestroy {
   };
 
   ngOnDestroy(): void {
-    this.stopRecording();
+    this.cleanup();
   }
 
   static checkBrowserSupport(): boolean {
-    return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia);
+    return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia && window.MediaRecorder);
   }
 
   async startRecording(): Promise<void> {
     try {
+      // Check browser support
+      if (!AudioStreamService.checkBrowserSupport()) {
+        throw this.createError('browser', 'Your browser does not support audio recording');
+      }
+
+      // Check permission first
+      const permission = await this.checkMicrophonePermission();
+      if (permission === 'denied') {
+        throw this.createError('permission', 'Microphone permission denied');
+      }
+
       // Request microphone access
-      this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
+      try {
+        this.audioStream = await navigator.mediaDevices.getUserMedia(this.constraints);
+      } catch (error: any) {
+        if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
+          throw this.createError('permission', 'Microphone access denied', error);
+        } else if (error.name === 'NotFoundError' || error.name === 'DevicesNotFoundError') {
+          throw this.createError('device', 'No microphone found', error);
+        } else {
+          throw this.createError('device', `Failed to access microphone: ${error.message}`, error);
+        }
+      }
 
       // Create MediaRecorder with appropriate format
       const options: MediaRecorderOptions = {
        mimeType: this.getPreferredMimeType()
      };
 
-      this.mediaRecorder = new MediaRecorder(this.audioStream, options);
+      try {
+        this.mediaRecorder = new MediaRecorder(this.audioStream, options);
+      } catch (error) {
+        // Fallback to default options if preferred mime type fails
+        console.warn('Failed with preferred mime type, using defaults:', error);
+        this.mediaRecorder = new MediaRecorder(this.audioStream);
+      }
 
-      // Handle data available
-      this.mediaRecorder.ondataavailable = async (event) => {
-        if (event.data.size > 0) {
-          const base64Data = await this.blobToBase64(event.data);
-          this.audioChunkSubject.next({
-            data: base64Data,
-            timestamp: Date.now()
-          });
-        }
-      };
+      // Set up event handlers
+      this.setupMediaRecorderHandlers();
 
-      // Handle errors
-      this.mediaRecorder.onerror = (error) => {
-        console.error('MediaRecorder error:', error);
-        this.stopRecording();
-      };
+      // Start volume monitoring
+      this.startVolumeMonitoring();
 
       // Start recording with timeslice for real-time streaming
       this.mediaRecorder.start(100); // Send chunks every 100ms
@@ -75,25 +106,68 @@ export class AudioStreamService implements OnDestroy {
 
       console.log('✅ Audio recording started');
 
-    } catch (error) {
+    } catch (error: any) {
       console.error('Failed to start recording:', error);
+      this.cleanup();
+
+      // Emit error
+      if (error.type) {
+        this.errorSubject.next(error);
+      } else {
+        this.errorSubject.next(this.createError('unknown', error.message || 'Failed to start recording', error));
+      }
+
      throw error;
    }
  }
 
   stopRecording(): void {
-    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
-      this.mediaRecorder.stop();
-      this.mediaRecorder = null;
+    try {
+      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
+        this.mediaRecorder.stop();
+      }
+
+      this.cleanup();
+      this.recordingStateSubject.next(false);
+      console.log('🛑 Audio recording stopped');
+    } catch (error) {
+      console.error('Error stopping recording:', error);
+      this.cleanup();
    }
+  }
+
+  private setupMediaRecorderHandlers(): void {
+    if (!this.mediaRecorder) return;
+
+    // Handle data available
+    this.mediaRecorder.ondataavailable = async (event) => {
+      try {
+        if (event.data && event.data.size > 0) {
+          const base64Data = await this.blobToBase64(event.data);
+          this.audioChunkSubject.next({
+            data: base64Data,
+            timestamp: Date.now()
+          });
+        }
+      } catch (error) {
+        console.error('Error processing audio chunk:', error);
+        this.errorSubject.next(this.createError('unknown', 'Failed to process audio chunk', error));
+      }
+    };
 
-    if (this.audioStream) {
-      this.audioStream.getTracks().forEach(track => track.stop());
-      this.audioStream = null;
-    }
+    // Handle recording stop
+    this.mediaRecorder.onstop = () => {
+      console.log('MediaRecorder stopped');
+      this.cleanup();
+    };
 
-    this.recordingStateSubject.next(false);
-    console.log('🛑 Audio recording stopped');
+    // Handle errors
+    this.mediaRecorder.onerror = (event: any) => {
+      console.error('MediaRecorder error:', event);
+      const error = this.createError('unknown', `Recording error: ${event.error?.message || 'Unknown error'}`, event.error);
+      this.errorSubject.next(error);
+      this.stopRecording();
+    };
  }
 
   private getPreferredMimeType(): string {
@@ -101,7 +175,8 @@
       'audio/webm;codecs=opus',
       'audio/webm',
       'audio/ogg;codecs=opus',
-      'audio/ogg'
+      'audio/ogg',
+      'audio/mp4'
    ];
 
    for (const type of types) {
@@ -111,7 +186,8 @@
       }
    }
 
-    // Fallback to default
+    // Return empty to use browser default
+    console.warn('No supported MIME types found, using browser default');
    return '';
  }
 
@@ -119,52 +195,184 @@
     return new Promise((resolve, reject) => {
       const reader = new FileReader();
       reader.onloadend = () => {
-        if (reader.result) {
+        if (reader.result && typeof reader.result === 'string') {
           // Remove data URL prefix
-          const base64 = (reader.result as string).split(',')[1];
+          const base64 = reader.result.split(',')[1];
          resolve(base64);
        } else {
          reject(new Error('Failed to convert blob to base64'));
        }
      };
-      reader.onerror = reject;
+      reader.onerror = () => {
+        reject(new Error('FileReader error'));
+      };
      reader.readAsDataURL(blob);
    });
  }
 
   // Volume level monitoring
-  async getVolumeLevel(): Promise<number> {
-    if (!this.audioStream) return 0;
-
-    const audioContext = new AudioContext();
-    const analyser = audioContext.createAnalyser();
-    const source = audioContext.createMediaStreamSource(this.audioStream);
+  private startVolumeMonitoring(): void {
+    if (!this.audioStream) return;
 
-    source.connect(analyser);
-    analyser.fftSize = 256;
-
-    const dataArray = new Uint8Array(analyser.frequencyBinCount);
-    analyser.getByteFrequencyData(dataArray);
-
-    // Calculate average volume
-    const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
+    try {
+      this.audioContext = new AudioContext();
+      this.analyser = this.audioContext.createAnalyser();
+      const source = this.audioContext.createMediaStreamSource(this.audioStream);
+
+      source.connect(this.analyser);
+      this.analyser.fftSize = 256;
+
+      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
+
+      // Monitor volume every 100ms
+      this.volumeInterval = setInterval(() => {
+        if (this.analyser) {
+          this.analyser.getByteFrequencyData(dataArray);
+
+          // Calculate average volume
+          const sum = dataArray.reduce((acc, val) => acc + val, 0);
+          const average = sum / dataArray.length;
+          const normalizedVolume = average / 255; // Normalize to 0-1
+
+          this.volumeLevelSubject.next(normalizedVolume);
+        }
+      }, 100);
+    } catch (error) {
+      console.warn('Failed to start volume monitoring:', error);
+    }
+  }
+
+  private stopVolumeMonitoring(): void {
+    if (this.volumeInterval) {
+      clearInterval(this.volumeInterval);
+      this.volumeInterval = null;
+    }
 
-    // Cleanup
-    source.disconnect();
-    audioContext.close();
+    if (this.audioContext) {
+      try {
+        this.audioContext.close();
+      } catch (error) {
+        console.warn('Error closing audio context:', error);
+      }
+      this.audioContext = null;
+      this.analyser = null;
+    }
+  }
+
+  async getVolumeLevel(): Promise<number> {
+    if (!this.audioStream || !this.analyser) return 0;
 
-    return average / 255; // Normalize to 0-1
+    try {
+      const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
+      this.analyser.getByteFrequencyData(dataArray);
+
+      // Calculate average volume
+      const average = dataArray.reduce((sum, value) => sum + value, 0) / dataArray.length;
+
+      return average / 255; // Normalize to 0-1
+    } catch (error) {
+      console.error('Error getting volume level:', error);
+      return 0;
+    }
  }
 
   // Check microphone permissions
   async checkMicrophonePermission(): Promise<PermissionState> {
     try {
+      // First check if Permissions API is available
+      if (!navigator.permissions || !navigator.permissions.query) {
+        console.warn('Permissions API not supported');
+        // Try to check by attempting getUserMedia with video disabled
+        try {
+          const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
+          stream.getTracks().forEach(track => track.stop());
+          return 'granted';
+        } catch (error: any) {
+          if (error.name === 'NotAllowedError' || error.name === 'PermissionDeniedError') {
+            return 'denied';
+          }
+          return 'prompt';
+        }
+      }
+
+      // Use Permissions API
       const result = await navigator.permissions.query({ name: 'microphone' as PermissionName });
       return result.state;
     } catch (error) {
-      console.warn('Permissions API not supported:', error);
-      // Assume granted if API not supported
-      return 'granted';
+      console.warn('Error checking microphone permission:', error);
+      // Assume prompt state if we can't determine
+      return 'prompt';
    }
  }
-}
+
+  private cleanup(): void {
+    try {
+      // Stop media recorder
+      if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
+        this.mediaRecorder.stop();
+      }
+      this.mediaRecorder = null;
+
+      // Stop all tracks
+      if (this.audioStream) {
+        this.audioStream.getTracks().forEach(track => {
+          track.stop();
+        });
+        this.audioStream = null;
+      }
+
+      // Stop volume monitoring
+      this.stopVolumeMonitoring();
+
+    } catch (error) {
+      console.error('Error during cleanup:', error);
+    }
+  }
+
+  private createError(type: AudioStreamError['type'], message: string, originalError?: any): AudioStreamError {
+    return {
+      type,
+      message,
+      originalError
+    };
+  }
+
+  // Get recording state
+  isRecording(): boolean {
+    return this.mediaRecorder !== null && this.mediaRecorder.state === 'recording';
+  }
+
+  // Get available audio devices
+  async getAudioDevices(): Promise<MediaDeviceInfo[]> {
+    try {
+      const devices = await navigator.mediaDevices.enumerateDevices();
+      return devices.filter(device => device.kind === 'audioinput');
+    } catch (error) {
+      console.error('Error enumerating devices:', error);
+      return [];
+    }
+  }
+
+  // Switch audio device
+  async switchAudioDevice(deviceId: string): Promise<void> {
+    if (this.isRecording()) {
+      // Stop current recording
+      this.stopRecording();
+
+      // Update constraints with new device
+      this.constraints.audio = {
+        ...this.constraints.audio,
+        deviceId: { exact: deviceId }
+      } as any;
+
+      // Restart recording with new device
+      await this.startRecording();
+    } else {
+      // Just update constraints for next recording
+      this.constraints.audio = {
+        ...this.constraints.audio,
+        deviceId: { exact: deviceId }
+      } as any;
+    }
+  }
+}
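
For reference, a minimal sketch of how a consuming Angular component might drive the updated service. Only the AudioStreamService API shown in the diff above (audioChunk$, error$, volumeLevel$, isRecording, startRecording, stopRecording) comes from this commit; the component name, selector, template, and import path below are hypothetical illustrations.

// audio-capture-demo.component.ts (hypothetical consumer, not part of this commit)
import { Component, OnDestroy, OnInit } from '@angular/core';
import { Subscription } from 'rxjs';
// Import path assumes the component lives one level below src/app
import { AudioStreamService, AudioChunk, AudioStreamError } from '../services/audio-stream.service';

@Component({
  selector: 'app-audio-capture-demo',
  template: `<button (click)="toggle()">{{ recording ? 'Stop' : 'Start' }}</button>`
})
export class AudioCaptureDemoComponent implements OnInit, OnDestroy {
  recording = false;
  private subs = new Subscription();

  constructor(private audioStream: AudioStreamService) {}

  ngOnInit(): void {
    // Base64 chunks arrive roughly every 100 ms while recording (timeslice in the service)
    this.subs.add(this.audioStream.audioChunk$.subscribe((chunk: AudioChunk) => {
      console.log('chunk at', chunk.timestamp, 'base64 length', chunk.data.length);
    }));

    // Typed errors from the new error$ stream
    this.subs.add(this.audioStream.error$.subscribe((err: AudioStreamError) => {
      console.error(`[${err.type}] ${err.message}`);
    }));

    // Normalized 0-1 volume level from the new volumeLevel$ stream
    this.subs.add(this.audioStream.volumeLevel$.subscribe(level => {
      console.log('volume', level.toFixed(2));
    }));
  }

  async toggle(): Promise<void> {
    if (this.audioStream.isRecording()) {
      this.audioStream.stopRecording();
      this.recording = false;
    } else {
      await this.audioStream.startRecording();
      this.recording = true;
    }
  }

  ngOnDestroy(): void {
    // Mirror the service's own cleanup pattern: drop subscriptions and stop the recorder
    this.subs.unsubscribe();
    this.audioStream.stopRecording();
  }
}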