ciyidogan committed on
Commit
fe3edc5
·
verified ·
1 Parent(s): c2548fb

Update flare-ui/src/app/services/conversation-manager.service.ts

Browse files
flare-ui/src/app/services/conversation-manager.service.ts CHANGED
@@ -1,856 +1,858 @@
1
- // conversation-manager.service.ts
2
- // Path: /flare-ui/src/app/services/conversation-manager.service.ts
3
-
4
- import { Injectable, OnDestroy } from '@angular/core';
5
- import { Subject, Subscription, BehaviorSubject, throwError } from 'rxjs';
6
- import { catchError, retry } from 'rxjs/operators';
7
- import { WebSocketService } from './websocket.service';
8
- import { AudioStreamService } from './audio-stream.service';
9
-
10
- export type ConversationState =
11
- | 'idle'
12
- | 'listening'
13
- | 'processing_stt'
14
- | 'processing_llm'
15
- | 'processing_tts'
16
- | 'playing_audio'
17
- | 'error';
18
-
19
- export interface ConversationMessage {
20
- role: 'user' | 'assistant' | 'system';
21
- text: string;
22
- timestamp: Date;
23
- audioUrl?: string;
24
- error?: boolean;
25
- }
26
-
27
- export interface ConversationConfig {
28
- language?: string;
29
- stt_engine?: string;
30
- tts_engine?: string;
31
- enable_barge_in?: boolean;
32
- max_silence_duration?: number;
33
- }
34
-
35
- export interface ConversationError {
36
- type: 'websocket' | 'audio' | 'permission' | 'network' | 'unknown';
37
- message: string;
38
- details?: any;
39
- timestamp: Date;
40
- }
41
-
42
- @Injectable({
43
- providedIn: 'root'
44
- })
45
- export class ConversationManagerService implements OnDestroy {
46
- private subscriptions = new Subscription();
47
- private audioQueue: string[] = [];
48
- private isInterrupting = false;
49
- private sessionId: string | null = null;
50
- private conversationConfig: ConversationConfig = {
51
- language: 'tr-TR',
52
- stt_engine: 'google',
53
- enable_barge_in: true
54
- };
55
-
56
- // State management
57
- private currentStateSubject = new BehaviorSubject<ConversationState>('idle');
58
- public currentState$ = this.currentStateSubject.asObservable();
59
-
60
- // Message history
61
- private messagesSubject = new BehaviorSubject<ConversationMessage[]>([]);
62
- public messages$ = this.messagesSubject.asObservable();
63
-
64
- // Current transcription
65
- private transcriptionSubject = new BehaviorSubject<string>('');
66
- public transcription$ = this.transcriptionSubject.asObservable();
67
-
68
- // Error handling
69
- private errorSubject = new Subject<ConversationError>();
70
- public error$ = this.errorSubject.asObservable();
71
-
72
- private sttReadySubject = new Subject<boolean>();
73
-
74
- // Audio player reference
75
- private audioPlayer: HTMLAudioElement | null = null;
76
- private audioPlayerPromise: Promise<void> | null = null;
77
-
78
- constructor(
79
- private wsService: WebSocketService,
80
- private audioService: AudioStreamService
81
- ) {}
82
-
83
- ngOnDestroy(): void {
84
- this.cleanup();
85
- }
86
-
87
- async startConversation(sessionId: string, config?: ConversationConfig): Promise<void> {
88
- try {
89
- if (!sessionId) {
90
- throw new Error('Session ID is required');
91
- }
92
-
93
- // Update configuration
94
- if (config) {
95
- this.conversationConfig = { ...this.conversationConfig, ...config };
96
- }
97
-
98
- this.sessionId = sessionId;
99
-
100
- // Start in listening state
101
- this.currentStateSubject.next('listening');
102
- console.log('🎤 Starting conversation in continuous listening mode');
103
-
104
- // Connect WebSocket first
105
- await this.wsService.connect(sessionId).catch(error => {
106
- throw new Error(`WebSocket connection failed: ${error.message}`);
107
- });
108
-
109
- // Set up subscriptions BEFORE sending any messages
110
- this.setupSubscriptions();
111
-
112
- // Send start signal with configuration
113
- this.wsService.sendControl('start_conversation', {
114
- ...this.conversationConfig,
115
- continuous_listening: true
116
- });
117
-
118
- console.log('✅ [ConversationManager] Conversation started - waiting for welcome TTS');
119
-
120
- } catch (error: any) {
121
- console.error('Failed to start conversation:', error);
122
-
123
- const conversationError: ConversationError = {
124
- type: this.determineErrorType(error),
125
- message: error.message || 'Failed to start conversation',
126
- details: error,
127
- timestamp: new Date()
128
- };
129
-
130
- this.errorSubject.next(conversationError);
131
- this.currentStateSubject.next('error');
132
- this.cleanup();
133
-
134
- throw error;
135
- }
136
- }
137
-
138
- stopConversation(): void {
139
- try {
140
- // First stop audio recording
141
- this.audioService.stopRecording();
142
-
143
- // Send conversation end signal
144
- if (this.wsService.isConnected()) {
145
- this.wsService.sendControl('stop_conversation'); // stop_session yerine
146
- }
147
-
148
- // Small delay before disconnecting
149
- setTimeout(() => {
150
- this.cleanup();
151
- this.addSystemMessage('Conversation ended');
152
- }, 100);
153
-
154
- } catch (error) {
155
- console.error('Error stopping conversation:', error);
156
- this.cleanup();
157
- }
158
- }
159
-
160
- private setupSubscriptions(): void {
161
- // Audio chunks from microphone
162
- this.subscriptions.add(
163
- this.audioService.audioChunk$.subscribe({
164
- next: (chunk) => {
165
- if (!this.isInterrupting && this.wsService.isConnected()) {
166
- try {
167
- this.wsService.sendAudioChunk(chunk.data);
168
- } catch (error) {
169
- console.error('Failed to send audio chunk:', error);
170
- }
171
- }
172
- },
173
- error: (error) => {
174
- console.error('Audio stream error:', error);
175
- this.handleAudioError(error);
176
- }
177
- })
178
- );
179
-
180
- // Audio stream errors
181
- this.subscriptions.add(
182
- this.audioService.error$.subscribe(error => {
183
- this.handleAudioError(error);
184
- })
185
- );
186
-
187
- // WebSocket messages
188
- this.subscriptions.add(
189
- this.wsService.message$.subscribe({
190
- next: (message) => {
191
- this.handleMessage(message);
192
- },
193
- error: (error) => {
194
- console.error('WebSocket message error:', error);
195
- this.handleWebSocketError(error);
196
- }
197
- })
198
- );
199
-
200
- // Subscribe to transcription updates - SADECE FINAL RESULTS
201
- this.subscriptions.add(
202
- this.wsService.transcription$.subscribe(result => {
203
- // SADECE final transcription'ları işle
204
- if (result.is_final) {
205
- console.log('📝 Final transcription received:', result);
206
- const messages = this.messagesSubject.value;
207
- const lastMessage = messages[messages.length - 1];
208
- if (!lastMessage || lastMessage.role !== 'user' || lastMessage.text !== result.text) {
209
- this.addMessage('user', result.text);
210
- }
211
- }
212
- })
213
- );
214
-
215
- // State changes
216
- this.subscriptions.add(
217
- this.wsService.stateChange$.subscribe(change => {
218
- this.currentStateSubject.next(change.to as ConversationState);
219
- this.handleStateChange(change.from, change.to);
220
- })
221
- );
222
-
223
- // WebSocket errors
224
- this.subscriptions.add(
225
- this.wsService.error$.subscribe(error => {
226
- console.error('WebSocket error:', error);
227
- this.handleWebSocketError({ message: error });
228
- })
229
- );
230
-
231
- // WebSocket connection state
232
- this.subscriptions.add(
233
- this.wsService.connection$.subscribe(connected => {
234
- if (!connected && this.currentStateSubject.value !== 'idle') {
235
- this.addSystemMessage('Connection lost. Attempting to reconnect...');
236
- }
237
- })
238
- );
239
- }
240
-
241
- private handleMessage(message: any): void {
242
- try {
243
- switch (message.type) {
244
- case 'transcription':
245
- // SADECE final transcription'ları işle. Interim transcription'ları işlemiyoruz
246
- if (message['is_final']) {
247
- const messages = this.messagesSubject.value;
248
- const lastMessage = messages[messages.length - 1];
249
- if (!lastMessage || lastMessage.role !== 'user' || lastMessage.text !== message['text']) {
250
- this.addMessage('user', message['text']);
251
- }
252
- }
253
- break;
254
-
255
- case 'assistant_response':
256
- // Welcome mesajı veya normal yanıt
257
- const isWelcome = message['is_welcome'] || false;
258
- this.addMessage('assistant', message['text']);
259
-
260
- if (isWelcome) {
261
- console.log('📢 Welcome message received:', message['text']);
262
- }
263
- break;
264
-
265
- case 'tts_audio':
266
- this.handleTTSAudio(message);
267
- break;
268
-
269
- case 'tts_error':
270
- // TTS hatası durumunda kullanıcıya bilgi ver
271
- console.error('TTS Error:', message['message']);
272
- this.addSystemMessage(message['message']);
273
- break;
274
-
275
- case 'control':
276
- if (message['action'] === 'stop_playback') {
277
- this.stopAudioPlayback();
278
- }
279
- break;
280
-
281
- case 'error':
282
- this.handleServerError(message);
283
- break;
284
-
285
- case 'session_config':
286
- // Update configuration from server
287
- if (message['config']) {
288
- this.conversationConfig = { ...this.conversationConfig, ...message['config'] };
289
- }
290
- break;
291
-
292
- case 'session_started':
293
- // Session başladı, STT durumunu kontrol et
294
- console.log('📢 Session started:', message);
295
- if (!message['stt_initialized']) {
296
- this.addSystemMessage('Speech recognition failed to initialize. Voice input will not be available.');
297
- }
298
- break;
299
-
300
- case 'stt_ready':
301
- console.log('✅ [ConversationManager] STT ready signal received');
302
- // ✅ STT hazır, recording'i başlat
303
- if (!this.audioService.isRecording()) {
304
- this.audioService.startRecording().then(() => {
305
- console.log('🎤 [ConversationManager] Audio recording started');
306
- }).catch(error => {
307
- console.error(' Failed to start recording:', error);
308
- this.handleAudioError(error);
309
- });
310
- }
311
- break;
312
-
313
- case 'state_change':
314
- // Backend'den gelen state'i frontend state'ine map et
315
- const backendState = message['to'] || message['state'];
316
- const mappedState = this.mapBackendStateToFrontend(backendState);
317
- if (mappedState) {
318
- this.currentStateSubject.next(mappedState);
319
-
320
- // Log state changes with better format
321
- console.log(`📊 Backend state: ${backendState} → Frontend state: ${mappedState}`);
322
- } else {
323
- console.warn(`⚠️ Unknown backend state: ${backendState}`);
324
- }
325
- break;
326
-
327
- case 'conversation_started':
328
- // Conversation başladığında log at
329
- console.log('📢 Conversation started:', message);
330
- break;
331
- }
332
- } catch (error) {
333
- console.error('Error handling message:', error);
334
- this.errorSubject.next({
335
- type: 'unknown',
336
- message: 'Failed to process message',
337
- details: error,
338
- timestamp: new Date()
339
- });
340
- }
341
- }
342
-
343
- private mapBackendStateToFrontend(backendState: string): ConversationState | null {
344
- const stateMap: { [key: string]: ConversationState } = {
345
- 'idle': 'idle',
346
- 'initializing': 'idle',
347
- 'preparing_welcome': 'processing_tts',
348
- 'playing_welcome': 'playing_audio',
349
- 'listening': 'listening',
350
- 'processing_speech': 'processing_stt',
351
- 'preparing_response': 'processing_llm',
352
- 'playing_response': 'playing_audio',
353
- 'error': 'error',
354
- 'ended': 'idle'
355
- };
356
-
357
- return stateMap[backendState] || null;
358
- }
359
-
360
- private handleStateChange(from: string, to: string): void {
361
- console.log(`📊 State: ${from} → ${to}`);
362
-
363
- // State değişimlerinde transcription'ı temizleme
364
- // Sadece error durumunda temizle
365
- if (to === 'error') {
366
- this.transcriptionSubject.next('');
367
- }
368
-
369
- // Log state changes for debugging
370
- console.log(`🎤 Continuous listening mode - state: ${to}`);
371
- }
372
-
373
- private playQueuedAudio(): void {
374
- const messages = this.messagesSubject.value;
375
- const lastMessage = messages[messages.length - 1];
376
-
377
- if (lastMessage?.audioUrl && lastMessage.role === 'assistant') {
378
- this.playAudio(lastMessage.audioUrl);
379
- }
380
- }
381
-
382
- private async playAudio(audioUrl: string): Promise<void> {
383
- try {
384
- console.log('🎵 [ConversationManager] playAudio called', {
385
- hasAudioPlayer: !!this.audioPlayer,
386
- audioUrl: audioUrl,
387
- timestamp: new Date().toISOString()
388
- });
389
-
390
- // Her seferinde yeni audio player oluştur ve handler'ları set et
391
- if (this.audioPlayer) {
392
- // Eski player'ı temizle
393
- this.audioPlayer.pause();
394
- this.audioPlayer.src = '';
395
- this.audioPlayer = null;
396
- }
397
-
398
- // Yeni player oluştur
399
- this.audioPlayer = new Audio();
400
- this.setupAudioPlayerHandlers(); // HER SEFERINDE handler'ları set et
401
-
402
- this.audioPlayer.src = audioUrl;
403
-
404
- // Store the play promise to handle interruptions properly
405
- this.audioPlayerPromise = this.audioPlayer.play();
406
-
407
- await this.audioPlayerPromise;
408
-
409
- } catch (error: any) {
410
- // Check if error is due to interruption
411
- if (error.name === 'AbortError') {
412
- console.log('Audio playback interrupted');
413
- } else {
414
- console.error('Audio playback error:', error);
415
- this.errorSubject.next({
416
- type: 'audio',
417
- message: 'Failed to play audio response',
418
- details: error,
419
- timestamp: new Date()
420
- });
421
- }
422
- } finally {
423
- this.audioPlayerPromise = null;
424
- }
425
- }
426
-
427
- private setupAudioPlayerHandlers(): void {
428
- if (!this.audioPlayer) return;
429
-
430
- this.audioPlayer.onended = async () => {
431
- console.log('🎵 [ConversationManager] Audio playback ended', {
432
- currentState: this.currentStateSubject.value,
433
- isRecording: this.audioService.isRecording(),
434
- timestamp: new Date().toISOString()
435
- });
436
-
437
- try {
438
- // Backend'e audio bittiğini bildir
439
- if (this.wsService.isConnected()) {
440
- console.log('📤 [ConversationManager] Sending audio_ended to backend');
441
- this.wsService.sendControl('audio_ended');
442
-
443
- // ✅ Backend STT başlatacak ve bize stt_ready sinyali gönderecek
444
- // ✅ Recording'i burada başlatmıyoruz, handleMessage'da stt_ready gelince başlatacağız
445
- console.log('⏳ [ConversationManager] Waiting for STT ready signal from backend...');
446
- }
447
-
448
- } catch (error) {
449
- console.error('❌ [ConversationManager] Failed to handle audio end:', error);
450
- this.handleAudioError(error);
451
- }
452
- };
453
-
454
- this.audioPlayer.onerror = (error) => {
455
- console.error('Audio player error:', error);
456
- this.errorSubject.next({
457
- type: 'audio',
458
- message: 'Audio playback error occurred',
459
- details: error,
460
- timestamp: new Date()
461
- });
462
- };
463
-
464
- this.audioPlayer.onplay = () => {
465
- console.log('▶️ [ConversationManager] Audio playback started');
466
- };
467
-
468
- this.audioPlayer.onpause = () => {
469
- console.log('⏸️ [ConversationManager] Audio playback paused');
470
- };
471
- }
472
-
473
- private stopAudioPlayback(): void {
474
- try {
475
- if (this.audioPlayer) {
476
- this.audioPlayer.pause();
477
- this.audioPlayer.currentTime = 0;
478
-
479
- // Cancel any pending play promise
480
- if (this.audioPlayerPromise) {
481
- this.audioPlayerPromise.catch(() => {
482
- // Ignore abort errors
483
- });
484
- this.audioPlayerPromise = null;
485
- }
486
- }
487
- } catch (error) {
488
- console.error('Error stopping audio playback:', error);
489
- }
490
- }
491
-
492
- // Barge-in handling - DEVRE DIŞI
493
- performBargeIn(): void {
494
- // Barge-in özelliği devre dışı bırakıldı
495
- console.log('⚠️ Barge-in is currently disabled');
496
-
497
- // Kullanıcıya bilgi ver
498
- this.addSystemMessage('Barge-in feature is currently disabled.');
499
- }
500
-
501
- private addMessage(role: 'user' | 'assistant', text: string, error: boolean = false): void {
502
- if (!text || text.trim().length === 0) {
503
- return;
504
- }
505
-
506
- const messages = this.messagesSubject.value;
507
- messages.push({
508
- role,
509
- text,
510
- timestamp: new Date(),
511
- error
512
- });
513
- this.messagesSubject.next([...messages]);
514
- }
515
-
516
- private addSystemMessage(text: string): void {
517
- console.log(`📢 System: ${text}`);
518
- const messages = this.messagesSubject.value;
519
- messages.push({
520
- role: 'system',
521
- text,
522
- timestamp: new Date()
523
- });
524
- this.messagesSubject.next([...messages]);
525
- }
526
-
527
- private handleTTSAudio(message: any): void {
528
- try {
529
- // Validate audio data
530
- if (!message['data']) {
531
- console.warn('❌ TTS audio message missing data');
532
- return;
533
- }
534
-
535
- // Detailed log
536
- console.log('🎵 TTS chunk received:', {
537
- chunkIndex: message['chunk_index'],
538
- totalChunks: message['total_chunks'],
539
- dataLength: message['data'].length,
540
- dataPreview: message['data'].substring(0, 50) + '...',
541
- isLast: message['is_last'],
542
- mimeType: message['mime_type']
543
- });
544
-
545
- // Accumulate audio chunks (already base64)
546
- this.audioQueue.push(message['data']);
547
- console.log(`📦 Audio queue size: ${this.audioQueue.length} chunks`);
548
-
549
- if (message['is_last']) {
550
- console.log('🔧 Processing final audio chunk...');
551
-
552
- try {
553
- // All chunks received, combine and create audio blob
554
- const combinedBase64 = this.audioQueue.join('');
555
- console.log('✅ Combined audio data:', {
556
- totalLength: combinedBase64.length,
557
- queueSize: this.audioQueue.length,
558
- preview: combinedBase64.substring(0, 100) + '...'
559
- });
560
-
561
- // Validate base64
562
- console.log('🔍 Validating base64...');
563
- if (!this.isValidBase64(combinedBase64)) {
564
- throw new Error('Invalid base64 data received');
565
- }
566
- console.log(' Base64 validation passed');
567
-
568
- const audioBlob = this.base64ToBlob(combinedBase64, message['mime_type'] || 'audio/mpeg');
569
- const audioUrl = URL.createObjectURL(audioBlob);
570
- console.log('🎧 Audio URL created:', audioUrl);
571
-
572
- // Update last message with audio URL
573
- const messages = this.messagesSubject.value;
574
- if (messages.length > 0) {
575
- const lastAssistantMessageIndex = this.findLastAssistantMessageIndex(messages);
576
- if (lastAssistantMessageIndex >= 0) {
577
- messages[lastAssistantMessageIndex].audioUrl = audioUrl;
578
- this.messagesSubject.next([...messages]);
579
- console.log('✅ Audio URL attached to assistant message at index:', lastAssistantMessageIndex);
580
-
581
- // Auto-play if it's welcome message or if in playing_audio state
582
- const isWelcomeMessage = messages[lastAssistantMessageIndex].text &&
583
- messages[lastAssistantMessageIndex].timestamp &&
584
- (new Date().getTime() - messages[lastAssistantMessageIndex].timestamp.getTime()) < 10000; // 10 saniye içinde
585
-
586
- if (isWelcomeMessage || this.currentStateSubject.value === 'playing_audio') {
587
- setTimeout(() => {
588
- console.log('🎵 Auto-playing audio for welcome message');
589
- this.playAudio(audioUrl);
590
- }, 500);
591
- }
592
- } else {
593
- console.warn('⚠️ No assistant message found to attach audio');
594
- }
595
- }
596
-
597
- // Clear queue
598
- this.audioQueue = [];
599
- console.log('🧹 Audio queue cleared');
600
-
601
- console.log(' Audio processing completed successfully');
602
- } catch (error) {
603
- console.error(' Error creating audio blob:', error);
604
- console.error('Queue size was:', this.audioQueue.length);
605
- this.audioQueue = [];
606
- }
607
- }
608
- } catch (error) {
609
- console.error('❌ Error handling TTS audio:', error);
610
- this.audioQueue = []; // Clear queue on error
611
- }
612
- }
613
-
614
- private findLastAssistantMessageIndex(messages: ConversationMessage[]): number {
615
- for (let i = messages.length - 1; i >= 0; i--) {
616
- if (messages[i].role === 'assistant') {
617
- return i;
618
- }
619
- }
620
- return -1;
621
- }
622
-
623
- private isValidBase64(str: string): boolean {
624
- try {
625
- console.log(`🔍 Checking base64 validity for ${str.length} chars`);
626
-
627
- // Check if string contains only valid base64 characters
628
- const base64Regex = /^[A-Za-z0-9+/]*={0,2}$/;
629
- if (!base64Regex.test(str)) {
630
- console.error('❌ Base64 regex test failed');
631
- return false;
632
- }
633
-
634
- // Try to decode to verify
635
- const decoded = atob(str);
636
- console.log(`✅ Base64 decode successful, decoded length: ${decoded.length}`);
637
- return true;
638
- } catch (e) {
639
- console.error('❌ Base64 validation error:', e);
640
- return false;
641
- }
642
- }
643
-
644
- private base64ToBlob(base64: string, mimeType: string): Blob {
645
- try {
646
- console.log('🔄 Converting base64 to blob:', {
647
- base64Length: base64.length,
648
- mimeType: mimeType
649
- });
650
-
651
- const byteCharacters = atob(base64);
652
- console.log(`📊 Decoded to ${byteCharacters.length} bytes`);
653
-
654
- const byteNumbers = new Array(byteCharacters.length);
655
-
656
- for (let i = 0; i < byteCharacters.length; i++) {
657
- byteNumbers[i] = byteCharacters.charCodeAt(i);
658
- }
659
-
660
- const byteArray = new Uint8Array(byteNumbers);
661
- const blob = new Blob([byteArray], { type: mimeType });
662
-
663
- console.log('✅ Blob created:', {
664
- size: blob.size,
665
- type: blob.type,
666
- sizeKB: (blob.size / 1024).toFixed(2) + ' KB'
667
- });
668
-
669
- return blob;
670
- } catch (error) {
671
- console.error('❌ Error converting base64 to blob:', error);
672
- console.error('Input details:', {
673
- base64Length: base64.length,
674
- base64Preview: base64.substring(0, 100) + '...',
675
- mimeType: mimeType
676
- });
677
- throw new Error('Failed to convert audio data');
678
- }
679
- }
680
-
681
- private handleAudioError(error: any): void {
682
- const conversationError: ConversationError = {
683
- type: error.type || 'audio',
684
- message: error.message || 'Audio error occurred',
685
- details: error,
686
- timestamp: new Date()
687
- };
688
-
689
- this.errorSubject.next(conversationError);
690
-
691
- // Add user-friendly message
692
- if (error.type === 'permission') {
693
- this.addSystemMessage('Microphone permission denied. Please allow microphone access.');
694
- } else if (error.type === 'device') {
695
- this.addSystemMessage('Microphone not found or not accessible.');
696
- } else {
697
- this.addSystemMessage('Audio error occurred. Please check your microphone.');
698
- }
699
-
700
- // Update state
701
- this.currentStateSubject.next('error');
702
- }
703
-
704
- private handleWebSocketError(error: any): void {
705
- const conversationError: ConversationError = {
706
- type: 'websocket',
707
- message: error.message || 'WebSocket error occurred',
708
- details: error,
709
- timestamp: new Date()
710
- };
711
-
712
- this.errorSubject.next(conversationError);
713
- this.addSystemMessage('Connection error. Please check your internet connection.');
714
-
715
- // Don't set error state for temporary connection issues
716
- if (this.wsService.getReconnectionInfo().isReconnecting) {
717
- this.addSystemMessage('Attempting to reconnect...');
718
- } else {
719
- this.currentStateSubject.next('error');
720
- }
721
- }
722
-
723
- private handleServerError(message: any): void {
724
- const errorType = message['error_type'] || 'unknown';
725
- const errorMessage = message['message'] || 'Server error occurred';
726
-
727
- const conversationError: ConversationError = {
728
- type: errorType === 'race_condition' ? 'network' : 'unknown',
729
- message: errorMessage,
730
- details: message,
731
- timestamp: new Date()
732
- };
733
-
734
- this.errorSubject.next(conversationError);
735
-
736
- // STT initialization hatası için özel handling
737
- if (errorType === 'stt_init_failed') {
738
- this.addSystemMessage('Speech recognition service failed to initialize. Please check your configuration.');
739
- // Konuşmayı durdur
740
- this.stopConversation();
741
- } else if (errorType === 'race_condition') {
742
- this.addSystemMessage('Session conflict detected. Please restart the conversation.');
743
- } else if (errorType === 'stt_error') {
744
- this.addSystemMessage('Speech recognition error. Please try speaking again.');
745
- // STT hatası durumunda yeniden başlatmayı dene
746
- if (errorMessage.includes('Streaming not started')) {
747
- this.addSystemMessage('Restarting speech recognition...');
748
- // WebSocket'e restart sinyali gönder
749
- if (this.wsService.isConnected()) {
750
- this.wsService.sendControl('restart_stt');
751
- }
752
- }
753
- } else if (errorType === 'tts_error') {
754
- this.addSystemMessage('Text-to-speech error. Response will be shown as text only.');
755
- } else {
756
- this.addSystemMessage(`Error: ${errorMessage}`);
757
- }
758
- }
759
-
760
- private determineErrorType(error: any): ConversationError['type'] {
761
- if (error.type) {
762
- return error.type;
763
- }
764
-
765
- if (error.message?.includes('WebSocket') || error.message?.includes('connection')) {
766
- return 'websocket';
767
- }
768
-
769
- if (error.message?.includes('microphone') || error.message?.includes('audio')) {
770
- return 'audio';
771
- }
772
-
773
- if (error.message?.includes('permission')) {
774
- return 'permission';
775
- }
776
-
777
- if (error.message?.includes('network') || error.status === 0) {
778
- return 'network';
779
- }
780
-
781
- return 'unknown';
782
- }
783
-
784
- private cleanup(): void {
785
- try {
786
- this.subscriptions.unsubscribe();
787
- this.subscriptions = new Subscription();
788
-
789
- // Audio recording'i kesinlikle durdur
790
- if (this.audioService.isRecording()) {
791
- this.audioService.stopRecording();
792
- }
793
-
794
- this.wsService.disconnect();
795
- this.stopAudioPlayback();
796
-
797
- if (this.audioPlayer) {
798
- this.audioPlayer = null;
799
- }
800
-
801
- this.audioQueue = [];
802
- this.isInterrupting = false;
803
- this.currentStateSubject.next('idle');
804
- this.sttReadySubject.complete();
805
-
806
- console.log('🧹 Conversation cleaned up');
807
- } catch (error) {
808
- console.error('Error during cleanup:', error);
809
- }
810
- }
811
-
812
- // Public methods for UI
813
- getCurrentState(): ConversationState {
814
- return this.currentStateSubject.value;
815
- }
816
-
817
- getMessages(): ConversationMessage[] {
818
- return this.messagesSubject.value;
819
- }
820
-
821
- clearMessages(): void {
822
- this.messagesSubject.next([]);
823
- this.transcriptionSubject.next('');
824
- }
825
-
826
- updateConfig(config: Partial<ConversationConfig>): void {
827
- this.conversationConfig = { ...this.conversationConfig, ...config };
828
-
829
- // Send config update if connected
830
- if (this.wsService.isConnected()) {
831
- try {
832
- this.wsService.sendControl('update_config', config);
833
- } catch (error) {
834
- console.error('Failed to update config:', error);
835
- }
836
- }
837
- }
838
-
839
- getConfig(): ConversationConfig {
840
- return { ...this.conversationConfig };
841
- }
842
-
843
- isConnected(): boolean {
844
- return this.wsService.isConnected();
845
- }
846
-
847
- // Retry connection
848
- async retryConnection(): Promise<void> {
849
- if (!this.sessionId) {
850
- throw new Error('No session ID available for retry');
851
- }
852
-
853
- this.currentStateSubject.next('idle');
854
- await this.startConversation(this.sessionId, this.conversationConfig);
855
- }
 
 
856
  }
 
1
+ // conversation-manager.service.ts
2
+ // Path: /flare-ui/src/app/services/conversation-manager.service.ts
3
+
4
+ import { Injectable, OnDestroy } from '@angular/core';
5
+ import { Subject, Subscription, BehaviorSubject, throwError } from 'rxjs';
6
+ import { catchError, retry } from 'rxjs/operators';
7
+ import { WebSocketService } from './websocket.service';
8
+ import { AudioStreamService } from './audio-stream.service';
9
+
10
+ export type ConversationState =
11
+ | 'idle'
12
+ | 'listening'
13
+ | 'processing_stt'
14
+ | 'processing_llm'
15
+ | 'processing_tts'
16
+ | 'playing_audio'
17
+ | 'error';
18
+
19
+ export interface ConversationMessage {
20
+ role: 'user' | 'assistant' | 'system';
21
+ text: string;
22
+ timestamp: Date;
23
+ audioUrl?: string;
24
+ error?: boolean;
25
+ }
26
+
27
+ export interface ConversationConfig {
28
+ language?: string;
29
+ stt_engine?: string;
30
+ tts_engine?: string;
31
+ enable_barge_in?: boolean;
32
+ max_silence_duration?: number;
33
+ }
34
+
35
+ export interface ConversationError {
36
+ type: 'websocket' | 'audio' | 'permission' | 'network' | 'unknown';
37
+ message: string;
38
+ details?: any;
39
+ timestamp: Date;
40
+ }
41
+
42
+ @Injectable({
43
+ providedIn: 'root'
44
+ })
45
+ export class ConversationManagerService implements OnDestroy {
46
+ private subscriptions = new Subscription();
47
+ private audioQueue: string[] = [];
48
+ private isInterrupting = false;
49
+ private sessionId: string | null = null;
50
+ private conversationConfig: ConversationConfig = {
51
+ language: 'tr-TR',
52
+ stt_engine: 'google',
53
+ enable_barge_in: true
54
+ };
55
+
56
+ // State management
57
+ private currentStateSubject = new BehaviorSubject<ConversationState>('idle');
58
+ public currentState$ = this.currentStateSubject.asObservable();
59
+
60
+ // Message history
61
+ private messagesSubject = new BehaviorSubject<ConversationMessage[]>([]);
62
+ public messages$ = this.messagesSubject.asObservable();
63
+
64
+ // Current transcription
65
+ private transcriptionSubject = new BehaviorSubject<string>('');
66
+ public transcription$ = this.transcriptionSubject.asObservable();
67
+
68
+ // Error handling
69
+ private errorSubject = new Subject<ConversationError>();
70
+ public error$ = this.errorSubject.asObservable();
71
+
72
+ private sttReadySubject = new Subject<boolean>();
73
+
74
+ // Audio player reference
75
+ private audioPlayer: HTMLAudioElement | null = null;
76
+ private audioPlayerPromise: Promise<void> | null = null;
77
+
78
+ constructor(
79
+ private wsService: WebSocketService,
80
+ private audioService: AudioStreamService
81
+ ) {
82
+ this.audioService.setUseLinear16(true);
83
+ }
84
+
85
+ ngOnDestroy(): void {
86
+ this.cleanup();
87
+ }
88
+
89
+ async startConversation(sessionId: string, config?: ConversationConfig): Promise<void> {
90
+ try {
91
+ if (!sessionId) {
92
+ throw new Error('Session ID is required');
93
+ }
94
+
95
+ // Update configuration
96
+ if (config) {
97
+ this.conversationConfig = { ...this.conversationConfig, ...config };
98
+ }
99
+
100
+ this.sessionId = sessionId;
101
+
102
+ // Start in listening state
103
+ this.currentStateSubject.next('listening');
104
+ console.log('🎤 Starting conversation in continuous listening mode');
105
+
106
+ // Connect WebSocket first
107
+ await this.wsService.connect(sessionId).catch(error => {
108
+ throw new Error(`WebSocket connection failed: ${error.message}`);
109
+ });
110
+
111
+ // Set up subscriptions BEFORE sending any messages
112
+ this.setupSubscriptions();
113
+
114
+ // Send start signal with configuration
115
+ this.wsService.sendControl('start_conversation', {
116
+ ...this.conversationConfig,
117
+ continuous_listening: true
118
+ });
119
+
120
+ console.log('✅ [ConversationManager] Conversation started - waiting for welcome TTS');
121
+
122
+ } catch (error: any) {
123
+ console.error('Failed to start conversation:', error);
124
+
125
+ const conversationError: ConversationError = {
126
+ type: this.determineErrorType(error),
127
+ message: error.message || 'Failed to start conversation',
128
+ details: error,
129
+ timestamp: new Date()
130
+ };
131
+
132
+ this.errorSubject.next(conversationError);
133
+ this.currentStateSubject.next('error');
134
+ this.cleanup();
135
+
136
+ throw error;
137
+ }
138
+ }
139
+
140
+ stopConversation(): void {
141
+ try {
142
+ // First stop audio recording
143
+ this.audioService.stopRecording();
144
+
145
+ // Send conversation end signal
146
+ if (this.wsService.isConnected()) {
147
+ this.wsService.sendControl('stop_conversation'); // stop_session yerine
148
+ }
149
+
150
+ // Small delay before disconnecting
151
+ setTimeout(() => {
152
+ this.cleanup();
153
+ this.addSystemMessage('Conversation ended');
154
+ }, 100);
155
+
156
+ } catch (error) {
157
+ console.error('Error stopping conversation:', error);
158
+ this.cleanup();
159
+ }
160
+ }
161
+
162
  /**
   * Wires every RxJS stream the conversation depends on. Called once per
   * conversation from startConversation(), after the WebSocket is connected
   * but before the start control message is sent, so no event is missed.
   * All subscriptions are added to `this.subscriptions` and released in cleanup().
   */
  private setupSubscriptions(): void {
    // Audio chunks from microphone → forwarded to the backend over the socket.
    this.subscriptions.add(
      this.audioService.audioChunk$.subscribe({
        next: (chunk) => {
          // Drop chunks while interrupting or when the socket is down.
          if (!this.isInterrupting && this.wsService.isConnected()) {
            try {
              this.wsService.sendAudioChunk(chunk.data);
            } catch (error) {
              console.error('Failed to send audio chunk:', error);
            }
          }
        },
        error: (error) => {
          console.error('Audio stream error:', error);
          this.handleAudioError(error);
        }
      })
    );

    // Audio stream errors (device/permission failures surfaced by the service).
    this.subscriptions.add(
      this.audioService.error$.subscribe(error => {
        this.handleAudioError(error);
      })
    );

    // WebSocket messages — dispatched through the central handleMessage() switch.
    this.subscriptions.add(
      this.wsService.message$.subscribe({
        next: (message) => {
          this.handleMessage(message);
        },
        error: (error) => {
          console.error('WebSocket message error:', error);
          this.handleWebSocketError(error);
        }
      })
    );

    // Subscribe to transcription updates — FINAL results only.
    this.subscriptions.add(
      this.wsService.transcription$.subscribe(result => {
        // Only process final transcriptions; interim ones are ignored.
        if (result.is_final) {
          console.log('📝 Final transcription received:', result);
          // De-duplicate: skip if the last message is an identical user entry
          // (the same text may also arrive via the 'transcription' message path).
          const messages = this.messagesSubject.value;
          const lastMessage = messages[messages.length - 1];
          if (!lastMessage || lastMessage.role !== 'user' || lastMessage.text !== result.text) {
            this.addMessage('user', result.text);
          }
        }
      })
    );

    // State changes pushed by the WebSocket service.
    // NOTE(review): `change.to` is cast straight to ConversationState here,
    // while handleMessage() maps backend states through
    // mapBackendStateToFrontend() — confirm which stream carries raw backend names.
    this.subscriptions.add(
      this.wsService.stateChange$.subscribe(change => {
        this.currentStateSubject.next(change.to as ConversationState);
        this.handleStateChange(change.from, change.to);
      })
    );

    // WebSocket errors (plain error strings, wrapped into a message-like object).
    this.subscriptions.add(
      this.wsService.error$.subscribe(error => {
        console.error('WebSocket error:', error);
        this.handleWebSocketError({ message: error });
      })
    );

    // WebSocket connection state — inform the user when the link drops mid-conversation.
    this.subscriptions.add(
      this.wsService.connection$.subscribe(connected => {
        if (!connected && this.currentStateSubject.value !== 'idle') {
          this.addSystemMessage('Connection lost. Attempting to reconnect...');
        }
      })
    );
  }
242
+
243
  /**
   * Central dispatcher for every message received over the WebSocket.
   * Unknown message types fall through the switch and are ignored;
   * any processing failure is published on errorSubject as type 'unknown'.
   */
  private handleMessage(message: any): void {
    try {
      switch (message.type) {
        case 'transcription':
          // Only process FINAL transcriptions; interim results are ignored.
          if (message['is_final']) {
            // De-duplicate against the last user message (the same text can
            // also arrive via the transcription$ stream subscription).
            const messages = this.messagesSubject.value;
            const lastMessage = messages[messages.length - 1];
            if (!lastMessage || lastMessage.role !== 'user' || lastMessage.text !== message['text']) {
              this.addMessage('user', message['text']);
            }
          }
          break;

        case 'assistant_response':
          // Either the welcome message or a normal assistant reply.
          const isWelcome = message['is_welcome'] || false;
          this.addMessage('assistant', message['text']);

          if (isWelcome) {
            console.log('📢 Welcome message received:', message['text']);
          }
          break;

        case 'tts_audio':
          // Chunked, base64-encoded TTS audio — accumulated in handleTTSAudio().
          this.handleTTSAudio(message);
          break;

        case 'tts_error':
          // Surface TTS failures to the user as a system message.
          console.error('TTS Error:', message['message']);
          this.addSystemMessage(message['message']);
          break;

        case 'control':
          if (message['action'] === 'stop_playback') {
            this.stopAudioPlayback();
          }
          break;

        case 'error':
          this.handleServerError(message);
          break;

        case 'session_config':
          // Update configuration from server
          if (message['config']) {
            this.conversationConfig = { ...this.conversationConfig, ...message['config'] };
          }
          break;

        case 'session_started':
          // Session started — check whether speech recognition came up.
          console.log('📢 Session started:', message);
          if (!message['stt_initialized']) {
            this.addSystemMessage('Speech recognition failed to initialize. Voice input will not be available.');
          }
          break;

        case 'stt_ready':
          console.log('✅ [ConversationManager] STT ready signal received');
          // STT is ready on the backend — start microphone recording now.
          if (!this.audioService.isRecording()) {
            this.audioService.startRecording().then(() => {
              console.log('🎤 [ConversationManager] Audio recording started');
            }).catch(error => {
              console.error('❌ Failed to start recording:', error);
              this.handleAudioError(error);
            });
          }
          break;

        case 'state_change':
          // Map the backend pipeline state onto the frontend ConversationState.
          const backendState = message['to'] || message['state'];
          const mappedState = this.mapBackendStateToFrontend(backendState);
          if (mappedState) {
            this.currentStateSubject.next(mappedState);

            // Log state changes with better format
            console.log(`📊 Backend state: ${backendState} → Frontend state: ${mappedState}`);
          } else {
            console.warn(`⚠️ Unknown backend state: ${backendState}`);
          }
          break;

        case 'conversation_started':
          // Informational only — log that the conversation has started.
          console.log('📢 Conversation started:', message);
          break;
      }
    } catch (error) {
      console.error('Error handling message:', error);
      this.errorSubject.next({
        type: 'unknown',
        message: 'Failed to process message',
        details: error,
        timestamp: new Date()
      });
    }
  }
344
+
345
+ private mapBackendStateToFrontend(backendState: string): ConversationState | null {
346
+ const stateMap: { [key: string]: ConversationState } = {
347
+ 'idle': 'idle',
348
+ 'initializing': 'idle',
349
+ 'preparing_welcome': 'processing_tts',
350
+ 'playing_welcome': 'playing_audio',
351
+ 'listening': 'listening',
352
+ 'processing_speech': 'processing_stt',
353
+ 'preparing_response': 'processing_llm',
354
+ 'playing_response': 'playing_audio',
355
+ 'error': 'error',
356
+ 'ended': 'idle'
357
+ };
358
+
359
+ return stateMap[backendState] || null;
360
+ }
361
+
362
+ private handleStateChange(from: string, to: string): void {
363
+ console.log(`📊 State: ${from} ${to}`);
364
+
365
+ // State değişimlerinde transcription'ı temizleme
366
+ // Sadece error durumunda temizle
367
+ if (to === 'error') {
368
+ this.transcriptionSubject.next('');
369
+ }
370
+
371
+ // Log state changes for debugging
372
+ console.log(`🎤 Continuous listening mode - state: ${to}`);
373
+ }
374
+
375
+ private playQueuedAudio(): void {
376
+ const messages = this.messagesSubject.value;
377
+ const lastMessage = messages[messages.length - 1];
378
+
379
+ if (lastMessage?.audioUrl && lastMessage.role === 'assistant') {
380
+ this.playAudio(lastMessage.audioUrl);
381
+ }
382
+ }
383
+
384
  /**
   * Plays one TTS audio URL through a fresh HTMLAudioElement.
   * A new element is created per call (the old one is paused and discarded)
   * so the onended/onerror handlers are always attached to the live player.
   * The in-flight play() promise is kept in `audioPlayerPromise` so an
   * interruption (AbortError) can be told apart from a real playback failure.
   */
  private async playAudio(audioUrl: string): Promise<void> {
    try {
      console.log('🎵 [ConversationManager] playAudio called', {
        hasAudioPlayer: !!this.audioPlayer,
        audioUrl: audioUrl,
        timestamp: new Date().toISOString()
      });

      // Create a new audio player each time and re-register its handlers.
      if (this.audioPlayer) {
        // Tear down the previous player.
        this.audioPlayer.pause();
        this.audioPlayer.src = '';
        this.audioPlayer = null;
      }

      // Create the new player.
      this.audioPlayer = new Audio();
      this.setupAudioPlayerHandlers(); // register handlers EVERY time

      this.audioPlayer.src = audioUrl;

      // Store the play promise to handle interruptions properly
      this.audioPlayerPromise = this.audioPlayer.play();

      await this.audioPlayerPromise;

    } catch (error: any) {
      // Check if error is due to interruption
      if (error.name === 'AbortError') {
        // pause()/src-swap while play() was pending — expected, not an error.
        console.log('Audio playback interrupted');
      } else {
        console.error('Audio playback error:', error);
        this.errorSubject.next({
          type: 'audio',
          message: 'Failed to play audio response',
          details: error,
          timestamp: new Date()
        });
      }
    } finally {
      // Always release the pending-play handle, success or failure.
      this.audioPlayerPromise = null;
    }
  }
428
+
429
  /**
   * Attaches lifecycle handlers to the current audio player.
   * The important one is `onended`: when assistant audio finishes we notify
   * the backend (`audio_ended`) and then WAIT — the backend starts STT and
   * replies with `stt_ready`, which is what actually starts recording
   * (see handleMessage). Recording is deliberately NOT started here.
   */
  private setupAudioPlayerHandlers(): void {
    if (!this.audioPlayer) return;

    this.audioPlayer.onended = async () => {
      console.log('🎵 [ConversationManager] Audio playback ended', {
        currentState: this.currentStateSubject.value,
        isRecording: this.audioService.isRecording(),
        timestamp: new Date().toISOString()
      });

      try {
        // Tell the backend that audio playback has finished.
        if (this.wsService.isConnected()) {
          console.log('📤 [ConversationManager] Sending audio_ended to backend');
          this.wsService.sendControl('audio_ended');

          // The backend will start STT and send us an stt_ready signal.
          // ✅ We do not start recording here; handleMessage starts it when stt_ready arrives.
          console.log('⏳ [ConversationManager] Waiting for STT ready signal from backend...');
        }

      } catch (error) {
        console.error('❌ [ConversationManager] Failed to handle audio end:', error);
        this.handleAudioError(error);
      }
    };

    this.audioPlayer.onerror = (error) => {
      console.error('Audio player error:', error);
      this.errorSubject.next({
        type: 'audio',
        message: 'Audio playback error occurred',
        details: error,
        timestamp: new Date()
      });
    };

    this.audioPlayer.onplay = () => {
      console.log('▶️ [ConversationManager] Audio playback started');
    };

    this.audioPlayer.onpause = () => {
      console.log('⏸️ [ConversationManager] Audio playback paused');
    };
  }
474
+
475
+ private stopAudioPlayback(): void {
476
+ try {
477
+ if (this.audioPlayer) {
478
+ this.audioPlayer.pause();
479
+ this.audioPlayer.currentTime = 0;
480
+
481
+ // Cancel any pending play promise
482
+ if (this.audioPlayerPromise) {
483
+ this.audioPlayerPromise.catch(() => {
484
+ // Ignore abort errors
485
+ });
486
+ this.audioPlayerPromise = null;
487
+ }
488
+ }
489
+ } catch (error) {
490
+ console.error('Error stopping audio playback:', error);
491
+ }
492
+ }
493
+
494
+ // Barge-in handling - DEVRE DIŞI
495
+ performBargeIn(): void {
496
+ // Barge-in özelliği devre dışı bırakıldı
497
+ console.log('⚠️ Barge-in is currently disabled');
498
+
499
+ // Kullanıcıya bilgi ver
500
+ this.addSystemMessage('Barge-in feature is currently disabled.');
501
+ }
502
+
503
+ private addMessage(role: 'user' | 'assistant', text: string, error: boolean = false): void {
504
+ if (!text || text.trim().length === 0) {
505
+ return;
506
+ }
507
+
508
+ const messages = this.messagesSubject.value;
509
+ messages.push({
510
+ role,
511
+ text,
512
+ timestamp: new Date(),
513
+ error
514
+ });
515
+ this.messagesSubject.next([...messages]);
516
+ }
517
+
518
+ private addSystemMessage(text: string): void {
519
+ console.log(`📢 System: ${text}`);
520
+ const messages = this.messagesSubject.value;
521
+ messages.push({
522
+ role: 'system',
523
+ text,
524
+ timestamp: new Date()
525
+ });
526
+ this.messagesSubject.next([...messages]);
527
+ }
528
+
529
  /**
   * Accumulates chunked, base64-encoded TTS audio from the backend.
   * Chunks are queued until `is_last` arrives, then joined, validated,
   * converted to a Blob URL and attached to the latest assistant message.
   * Auto-play kicks in for fresh messages (< 10 s old, assumed to be the
   * welcome message) or while the UI is already in the playing_audio state.
   * On any failure the queue is cleared so a broken stream cannot poison
   * the next response.
   */
  private handleTTSAudio(message: any): void {
    try {
      // Validate audio data
      if (!message['data']) {
        console.warn('❌ TTS audio message missing data');
        return;
      }

      // Detailed log
      console.log('🎵 TTS chunk received:', {
        chunkIndex: message['chunk_index'],
        totalChunks: message['total_chunks'],
        dataLength: message['data'].length,
        dataPreview: message['data'].substring(0, 50) + '...',
        isLast: message['is_last'],
        mimeType: message['mime_type']
      });

      // Accumulate audio chunks (already base64)
      this.audioQueue.push(message['data']);
      console.log(`📦 Audio queue size: ${this.audioQueue.length} chunks`);

      if (message['is_last']) {
        console.log('🔧 Processing final audio chunk...');

        try {
          // All chunks received, combine and create audio blob
          // NOTE(review): joining assumes each chunk is a padding-free slice
          // of one base64 stream — confirm the backend does not pad per chunk.
          const combinedBase64 = this.audioQueue.join('');
          console.log('✅ Combined audio data:', {
            totalLength: combinedBase64.length,
            queueSize: this.audioQueue.length,
            preview: combinedBase64.substring(0, 100) + '...'
          });

          // Validate base64
          console.log('🔍 Validating base64...');
          if (!this.isValidBase64(combinedBase64)) {
            throw new Error('Invalid base64 data received');
          }
          console.log(' Base64 validation passed');

          const audioBlob = this.base64ToBlob(combinedBase64, message['mime_type'] || 'audio/mpeg');
          const audioUrl = URL.createObjectURL(audioBlob);
          console.log('🎧 Audio URL created:', audioUrl);

          // Update last message with audio URL
          const messages = this.messagesSubject.value;
          if (messages.length > 0) {
            const lastAssistantMessageIndex = this.findLastAssistantMessageIndex(messages);
            if (lastAssistantMessageIndex >= 0) {
              messages[lastAssistantMessageIndex].audioUrl = audioUrl;
              this.messagesSubject.next([...messages]);
              console.log('✅ Audio URL attached to assistant message at index:', lastAssistantMessageIndex);

              // Auto-play if it's welcome message or if in playing_audio state
              // Heuristic: a message created within the last 10 seconds is
              // treated as the welcome message.
              const isWelcomeMessage = messages[lastAssistantMessageIndex].text &&
                                       messages[lastAssistantMessageIndex].timestamp &&
                                       (new Date().getTime() - messages[lastAssistantMessageIndex].timestamp.getTime()) < 10000; // within 10 seconds

              if (isWelcomeMessage || this.currentStateSubject.value === 'playing_audio') {
                setTimeout(() => {
                  console.log('🎵 Auto-playing audio for welcome message');
                  this.playAudio(audioUrl);
                }, 500);
              }
            } else {
              console.warn('⚠️ No assistant message found to attach audio');
            }
          }

          // Clear queue
          this.audioQueue = [];
          console.log('🧹 Audio queue cleared');

          console.log(' Audio processing completed successfully');
        } catch (error) {
          console.error('❌ Error creating audio blob:', error);
          console.error('Queue size was:', this.audioQueue.length);
          this.audioQueue = [];
        }
      }
    } catch (error) {
      console.error('❌ Error handling TTS audio:', error);
      this.audioQueue = []; // Clear queue on error
    }
  }
615
+
616
+ private findLastAssistantMessageIndex(messages: ConversationMessage[]): number {
617
+ for (let i = messages.length - 1; i >= 0; i--) {
618
+ if (messages[i].role === 'assistant') {
619
+ return i;
620
+ }
621
+ }
622
+ return -1;
623
+ }
624
+
625
+ private isValidBase64(str: string): boolean {
626
+ try {
627
+ console.log(`🔍 Checking base64 validity for ${str.length} chars`);
628
+
629
+ // Check if string contains only valid base64 characters
630
+ const base64Regex = /^[A-Za-z0-9+/]*={0,2}$/;
631
+ if (!base64Regex.test(str)) {
632
+ console.error('❌ Base64 regex test failed');
633
+ return false;
634
+ }
635
+
636
+ // Try to decode to verify
637
+ const decoded = atob(str);
638
+ console.log(`✅ Base64 decode successful, decoded length: ${decoded.length}`);
639
+ return true;
640
+ } catch (e) {
641
+ console.error('❌ Base64 validation error:', e);
642
+ return false;
643
+ }
644
+ }
645
+
646
+ private base64ToBlob(base64: string, mimeType: string): Blob {
647
+ try {
648
+ console.log('🔄 Converting base64 to blob:', {
649
+ base64Length: base64.length,
650
+ mimeType: mimeType
651
+ });
652
+
653
+ const byteCharacters = atob(base64);
654
+ console.log(`📊 Decoded to ${byteCharacters.length} bytes`);
655
+
656
+ const byteNumbers = new Array(byteCharacters.length);
657
+
658
+ for (let i = 0; i < byteCharacters.length; i++) {
659
+ byteNumbers[i] = byteCharacters.charCodeAt(i);
660
+ }
661
+
662
+ const byteArray = new Uint8Array(byteNumbers);
663
+ const blob = new Blob([byteArray], { type: mimeType });
664
+
665
+ console.log('✅ Blob created:', {
666
+ size: blob.size,
667
+ type: blob.type,
668
+ sizeKB: (blob.size / 1024).toFixed(2) + ' KB'
669
+ });
670
+
671
+ return blob;
672
+ } catch (error) {
673
+ console.error('❌ Error converting base64 to blob:', error);
674
+ console.error('Input details:', {
675
+ base64Length: base64.length,
676
+ base64Preview: base64.substring(0, 100) + '...',
677
+ mimeType: mimeType
678
+ });
679
+ throw new Error('Failed to convert audio data');
680
+ }
681
+ }
682
+
683
+ private handleAudioError(error: any): void {
684
+ const conversationError: ConversationError = {
685
+ type: error.type || 'audio',
686
+ message: error.message || 'Audio error occurred',
687
+ details: error,
688
+ timestamp: new Date()
689
+ };
690
+
691
+ this.errorSubject.next(conversationError);
692
+
693
+ // Add user-friendly message
694
+ if (error.type === 'permission') {
695
+ this.addSystemMessage('Microphone permission denied. Please allow microphone access.');
696
+ } else if (error.type === 'device') {
697
+ this.addSystemMessage('Microphone not found or not accessible.');
698
+ } else {
699
+ this.addSystemMessage('Audio error occurred. Please check your microphone.');
700
+ }
701
+
702
+ // Update state
703
+ this.currentStateSubject.next('error');
704
+ }
705
+
706
+ private handleWebSocketError(error: any): void {
707
+ const conversationError: ConversationError = {
708
+ type: 'websocket',
709
+ message: error.message || 'WebSocket error occurred',
710
+ details: error,
711
+ timestamp: new Date()
712
+ };
713
+
714
+ this.errorSubject.next(conversationError);
715
+ this.addSystemMessage('Connection error. Please check your internet connection.');
716
+
717
+ // Don't set error state for temporary connection issues
718
+ if (this.wsService.getReconnectionInfo().isReconnecting) {
719
+ this.addSystemMessage('Attempting to reconnect...');
720
+ } else {
721
+ this.currentStateSubject.next('error');
722
+ }
723
+ }
724
+
725
  /**
   * Handles an 'error' message from the backend: publishes it on errorSubject
   * and reacts per error_type (stop on STT init failure, request an STT
   * restart for recoverable STT errors, degrade to text-only for TTS errors).
   */
  private handleServerError(message: any): void {
    const errorType = message['error_type'] || 'unknown';
    const errorMessage = message['message'] || 'Server error occurred';

    const conversationError: ConversationError = {
      // Race conditions are classified as network issues; everything else as unknown.
      type: errorType === 'race_condition' ? 'network' : 'unknown',
      message: errorMessage,
      details: message,
      timestamp: new Date()
    };

    this.errorSubject.next(conversationError);

    // Special handling for STT initialization failure.
    if (errorType === 'stt_init_failed') {
      this.addSystemMessage('Speech recognition service failed to initialize. Please check your configuration.');
      // Voice input is unusable — end the conversation.
      this.stopConversation();
    } else if (errorType === 'race_condition') {
      this.addSystemMessage('Session conflict detected. Please restart the conversation.');
    } else if (errorType === 'stt_error') {
      this.addSystemMessage('Speech recognition error. Please try speaking again.');
      // For this specific STT failure, attempt an automatic restart.
      if (errorMessage.includes('Streaming not started')) {
        this.addSystemMessage('Restarting speech recognition...');
        // Ask the backend to restart STT over the WebSocket.
        if (this.wsService.isConnected()) {
          this.wsService.sendControl('restart_stt');
        }
      }
    } else if (errorType === 'tts_error') {
      this.addSystemMessage('Text-to-speech error. Response will be shown as text only.');
    } else {
      this.addSystemMessage(`Error: ${errorMessage}`);
    }
  }
761
+
762
+ private determineErrorType(error: any): ConversationError['type'] {
763
+ if (error.type) {
764
+ return error.type;
765
+ }
766
+
767
+ if (error.message?.includes('WebSocket') || error.message?.includes('connection')) {
768
+ return 'websocket';
769
+ }
770
+
771
+ if (error.message?.includes('microphone') || error.message?.includes('audio')) {
772
+ return 'audio';
773
+ }
774
+
775
+ if (error.message?.includes('permission')) {
776
+ return 'permission';
777
+ }
778
+
779
+ if (error.message?.includes('network') || error.status === 0) {
780
+ return 'network';
781
+ }
782
+
783
+ return 'unknown';
784
+ }
785
+
786
+ private cleanup(): void {
787
+ try {
788
+ this.subscriptions.unsubscribe();
789
+ this.subscriptions = new Subscription();
790
+
791
+ // Audio recording'i kesinlikle durdur
792
+ if (this.audioService.isRecording()) {
793
+ this.audioService.stopRecording();
794
+ }
795
+
796
+ this.wsService.disconnect();
797
+ this.stopAudioPlayback();
798
+
799
+ if (this.audioPlayer) {
800
+ this.audioPlayer = null;
801
+ }
802
+
803
+ this.audioQueue = [];
804
+ this.isInterrupting = false;
805
+ this.currentStateSubject.next('idle');
806
+ this.sttReadySubject.complete();
807
+
808
+ console.log('🧹 Conversation cleaned up');
809
+ } catch (error) {
810
+ console.error('Error during cleanup:', error);
811
+ }
812
+ }
813
+
814
+ // Public methods for UI
815
+ getCurrentState(): ConversationState {
816
+ return this.currentStateSubject.value;
817
+ }
818
+
819
+ getMessages(): ConversationMessage[] {
820
+ return this.messagesSubject.value;
821
+ }
822
+
823
+ clearMessages(): void {
824
+ this.messagesSubject.next([]);
825
+ this.transcriptionSubject.next('');
826
+ }
827
+
828
+ updateConfig(config: Partial<ConversationConfig>): void {
829
+ this.conversationConfig = { ...this.conversationConfig, ...config };
830
+
831
+ // Send config update if connected
832
+ if (this.wsService.isConnected()) {
833
+ try {
834
+ this.wsService.sendControl('update_config', config);
835
+ } catch (error) {
836
+ console.error('Failed to update config:', error);
837
+ }
838
+ }
839
+ }
840
+
841
+ getConfig(): ConversationConfig {
842
+ return { ...this.conversationConfig };
843
+ }
844
+
845
  /** True while the underlying WebSocket connection is open. */
  isConnected(): boolean {
    return this.wsService.isConnected();
  }
848
+
849
+ // Retry connection
850
+ async retryConnection(): Promise<void> {
851
+ if (!this.sessionId) {
852
+ throw new Error('No session ID available for retry');
853
+ }
854
+
855
+ this.currentStateSubject.next('idle');
856
+ await this.startConversation(this.sessionId, this.conversationConfig);
857
+ }
858
  }