// lib/services/clip_queue/clip_queue_manager.dart

import 'dart:async';
import 'dart:math';
import 'package:aitube2/config/config.dart';
import 'package:flutter/foundation.dart';
import 'package:collection/collection.dart';
import '../../models/video_result.dart';
import '../../models/video_orientation.dart';
import '../../models/chat_message.dart';
import '../websocket_api_service.dart';
import '../chat_service.dart';
import '../../utils/seed.dart';
import 'clip_states.dart';
import 'video_clip.dart';
import 'queue_stats_logger.dart';
import 'clip_generation_handler.dart';

/// Manages a queue of video clips for generation and playback
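///
/// A minimal usage sketch (the `video` value and the playback wiring are
/// assumptions supplied by the surrounding app, not by this class):
///
/// ```dart
/// final manager = ClipQueueManager(
///   video: video,
///   onQueueUpdated: () => debugPrint('queue updated'),
/// );
/// await manager.initialize();
///
/// final clip = manager.currentClip;
/// if (clip != null) {
///   manager.startPlayingClip(clip);
///   // ...once playback of the clip finishes:
///   manager.markCurrentClipAsPlayed();
/// }
///
/// await manager.dispose();
/// ```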
class ClipQueueManager {
  /// The video for which clips are being generated
  VideoResult video;
  
  /// WebSocket service for API communication
  final WebSocketApiService _websocketService;
  
  /// Callback for when the queue is updated
  final void Function()? onQueueUpdated;
  
  /// Buffer of clips being managed
  final List<VideoClip> _clipBuffer = [];
  
  /// History of played clips
  final List<VideoClip> _clipHistory = [];
  
  /// Set of active generations (by seed)
  final Set<String> _activeGenerations = {};
  
  /// Timer for checking the buffer state
  Timer? _bufferCheckTimer;
  
  /// Timer for evolving the description
  Timer? _descriptionEvolutionTimer;
  
  /// Last time the description was evolved
  DateTime _lastDescriptionEvolutionTime = DateTime.now();
  
  /// Whether the manager is disposed
  bool _isDisposed = false;
  
  /// Whether the simulation is paused (controlled by video playback)
  bool _isSimulationPaused = false;
  
  /// Stats logger
  final QueueStatsLogger _logger = QueueStatsLogger();
  
  /// Generation handler
  late final ClipGenerationHandler _generationHandler;
  
  /// ID of the video being managed
  final String videoId;
  
  /// Evolution counter for tracking how many times we've evolved the description
  int _evolutionCounter = 0;
  
  /// Recent chat messages to include in description evolution
  final List<ChatMessage> _recentChatMessages = [];

  /// Constructor
  ClipQueueManager({
    required this.video,
    WebSocketApiService? websocketService,
    this.onQueueUpdated,
  }) : videoId = video.id,
       _websocketService = websocketService ?? WebSocketApiService() {
    _generationHandler = ClipGenerationHandler(
      websocketService: _websocketService,
      logger: _logger,
      activeGenerations: _activeGenerations,
      onQueueUpdated: onQueueUpdated,
    );
    
    // Start listening to chat messages
    final chatService = ChatService();
    chatService.initialize().then((_) {
      chatService.joinRoom(videoId).then((_) {
        chatService.chatStream.listen(_addChatMessage);
      }).catchError((e) {
        debugPrint('ClipQueueManager: Error joining chat room: $e');
      });
    }).catchError((e) {
      debugPrint('ClipQueueManager: Error initializing chat service: $e');
    });
  }
  
  /// Add a chat message to the recent messages list
  void _addChatMessage(ChatMessage message) {
    if (message.videoId == videoId) {
      _recentChatMessages.add(message);
      // Keep only the 10 most recent messages
      if (_recentChatMessages.length > 10) {
        _recentChatMessages.removeAt(0);
      }
      ClipQueueConstants.logEvent('Added chat message: ${message.content.substring(0, min(20, message.content.length))}...');
    }
  }

  /// Whether a new generation can be started
  bool get canStartNewGeneration => 
      _activeGenerations.length < Configuration.instance.renderQueueMaxConcurrentGenerations;
      
  /// Number of pending generations
  int get pendingGenerations => _clipBuffer.where((c) => c.isPending).length;
  
  /// Number of active generations
  int get activeGenerations => _activeGenerations.length;
  
  /// Current clip that is ready or playing
  VideoClip? get currentClip => _clipBuffer.firstWhereOrNull((c) => c.isReady || c.isPlaying);
  
  /// Next clip that is ready to play
  VideoClip? get nextReadyClip => _clipBuffer.where((c) => c.isReady && !c.isPlaying).firstOrNull;
  
  /// Whether there are any ready clips
  bool get hasReadyClips => _clipBuffer.any((c) => c.isReady);
  
  /// Unmodifiable view of the clip buffer
  List<VideoClip> get clipBuffer => List.unmodifiable(_clipBuffer);
  
  /// Unmodifiable view of the clip history
  List<VideoClip> get clipHistory => List.unmodifiable(_clipHistory);

  /// Current orientation of clips being generated
  VideoOrientation _currentOrientation = VideoOrientation.LANDSCAPE;
  
  /// Get the current orientation
  VideoOrientation get currentOrientation => _currentOrientation;

  /// Initialize the clip queue
  Future<void> initialize({VideoOrientation? orientation}) async {
    if (_isDisposed) return;
    
    _logger.logStateChange(
      'initialize:start',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
    _clipBuffer.clear();
    
    // Reset evolution counter and last evolution time
    _evolutionCounter = 0;
    _lastDescriptionEvolutionTime = DateTime.now();
    
    // Set initial orientation
    _currentOrientation = orientation ?? getOrientationFromDimensions(
      Configuration.instance.originalClipWidth, 
      Configuration.instance.originalClipHeight
    );
    
    try {
      final bufferSize = Configuration.instance.renderQueueBufferSize;
      while (_clipBuffer.length < bufferSize) {
        if (_isDisposed) return;
        
        final newClip = VideoClip(
          prompt: "${video.title}\n${video.description}",
          seed: video.useFixedSeed && video.seed > 0 ? video.seed : generateSeed(),
          orientation: _currentOrientation,
        );
        _clipBuffer.add(newClip);
        ClipQueueConstants.logEvent('Added initial clip ${newClip.seed} to buffer with orientation: ${_currentOrientation.name}');
      }

      if (_isDisposed) return;

      _startBufferCheck();
      _startDescriptionEvolution();
      await _fillBuffer();
      ClipQueueConstants.logEvent('Initialization complete. Buffer size: ${_clipBuffer.length}');
      printQueueState();
    } catch (e) {
      ClipQueueConstants.logEvent('Initialization error: $e');
      rethrow;
    }

    _logger.logStateChange(
      'initialize:complete',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
  }

  /// Start the buffer check timer
  void _startBufferCheck() {
    _bufferCheckTimer?.cancel();
    _bufferCheckTimer = Timer.periodic(
      const Duration(milliseconds: 200),
      (timer) {
        if (!_isDisposed) {
          _fillBuffer();
        }
      },
    );
    ClipQueueConstants.logEvent('Started buffer check timer');
  }
  
  /// Start the description evolution (simulation) timer
  void _startDescriptionEvolution() {
    // Cancel any existing timer
    _descriptionEvolutionTimer?.cancel();
    
    // Only start if simulation frequency is greater than 0
    if (Configuration.instance.simLoopFrequencyInSec <= 0) {
      ClipQueueConstants.logEvent('Simulation disabled (frequency is 0)');
      return;
    }
    
    // Adaptive check interval - less frequent checks to reduce overhead
    final checkInterval = max(3, Configuration.instance.simLoopFrequencyInSec ~/ 3);
    
    ClipQueueConstants.logEvent('Starting simulation with check interval of $checkInterval seconds');
    
    // Check periodically if it's time to simulate the video
    _descriptionEvolutionTimer = Timer.periodic(
      Duration(seconds: checkInterval),
      (timer) async {
        if (_isDisposed) return;
        
        // Skip if simulation is paused (due to video playback being paused)
        if (_isSimulationPaused) {
          ClipQueueConstants.logEvent('Skipping simulation because it is paused');
          return;
        }
        
        // Check if we're currently generating a video - if so, delay simulation
        final isGenerating = _activeGenerations.isNotEmpty;
        if (isGenerating) {
          ClipQueueConstants.logEvent('Delaying simulation due to active generations');
          return;
        }
        
        // Calculate time since last simulation
        final now = DateTime.now();
        final duration = now.difference(_lastDescriptionEvolutionTime);
        
        // If we've waited long enough, simulate the video
        if (duration.inSeconds >= Configuration.instance.simLoopFrequencyInSec) {
          ClipQueueConstants.logEvent('Triggering simulation after ${duration.inSeconds} seconds');
          await _evolveDescription();
          _lastDescriptionEvolutionTime = now;
        }
      },
    );
    ClipQueueConstants.logEvent('Started simulation timer');
  }
  
  /// Simulate the video by evolving the description using the LLM
  Future<void> _evolveDescription() async {
    if (!_websocketService.isConnected) {
      ClipQueueConstants.logEvent('Cannot simulate video: websocket not connected');
      return;
    }
    
    int retryCount = 0;
    const maxRetries = 2;
    
    // Function to get chat message string
    String getChatMessagesString() {
      if (_recentChatMessages.isEmpty) return '';
      
      return _recentChatMessages.map((msg) => 
        "${msg.username}: ${msg.content}"
      ).join("\n");
    }
    
    while (retryCount <= maxRetries) {
      try {
        // Format recent chat messages as a string for the simulation prompt
        String chatMessagesString = getChatMessagesString();
        if (chatMessagesString.isNotEmpty) {
          ClipQueueConstants.logEvent('Including ${_recentChatMessages.length} chat messages in simulation');
        }
        
        // Use the WebSocketService to simulate the video
        final result = await _websocketService.simulate(
          videoId: video.id,
          originalTitle: video.title,
          originalDescription: video.description,
          currentDescription: video.evolvedDescription.isEmpty ? video.description : video.evolvedDescription,
          condensedHistory: video.condensedHistory,
          evolutionCount: _evolutionCounter,
          chatMessages: chatMessagesString,
        );
        
        // Update the video with the evolved description
        video = video.copyWith(
          evolvedDescription: result['evolved_description'],
          condensedHistory: result['condensed_history'],
        );
        
        _evolutionCounter++;
        ClipQueueConstants.logEvent('Video simulated (iteration $_evolutionCounter)');
        onQueueUpdated?.call();
        
        // Success, exit retry loop
        break;
      } catch (e) {
        retryCount++;
        ClipQueueConstants.logEvent('Error simulating video (attempt $retryCount of ${maxRetries + 1}): $e');
        
        if (retryCount <= maxRetries) {
          // Wait before retrying with exponential backoff
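          // (with maxRetries = 2, `1 << retryCount` yields a 2s wait after the
          // first failure and a 4s wait after the second)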
          final delay = Duration(seconds: 1 << retryCount);
          ClipQueueConstants.logEvent('Retrying simulation in ${delay.inSeconds} seconds...');
          await Future.delayed(delay);
        } else {
          ClipQueueConstants.logEvent('Failed to simulate video after ${maxRetries + 1} attempts');
          
          // If we've been successful before but failed now, we can continue using the last evolved description
          if (_evolutionCounter > 0) {
            ClipQueueConstants.logEvent('Continuing with previous description');
          }
        }
      }
    }
  }

  /// Mark a specific clip as played
  void markClipAsPlayed(String clipId) {
    _logger.logStateChange(
      'markAsPlayed:start',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
    final playingClip = _clipBuffer.firstWhereOrNull((c) => c.id == clipId);
    if (playingClip != null) {
      playingClip.finishPlaying();
      
      _reorderBufferByPriority();
      _fillBuffer();
      onQueueUpdated?.call();
    }
    _logger.logStateChange(
      'markAsPlayed:complete',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
  }

  /// Fill the buffer with clips and start generations as needed
  Future<void> _fillBuffer() async {
    if (_isDisposed) return;

    // First ensure we have the correct buffer size
    while (_clipBuffer.length < Configuration.instance.renderQueueBufferSize) {
      // Determine which description to use for the prompt
      String descriptionToUse = video.description;
      
      // If we have an evolved description, use that instead
      if (video.evolvedDescription.isNotEmpty) {
        descriptionToUse = video.evolvedDescription;
      }
      
      final newClip = VideoClip(
        prompt: "${video.title}\n${descriptionToUse}",
        seed: video.useFixedSeed && video.seed > 0 ? video.seed : generateSeed(),
        orientation: _currentOrientation,
      );
      _clipBuffer.add(newClip);
      ClipQueueConstants.logEvent('Added new clip ${newClip.seed} with orientation ${_currentOrientation.name} to maintain buffer size');
    }

    // Process played clips first
    final playedClips = _clipBuffer.where((clip) => clip.hasPlayed).toList();
    if (playedClips.isNotEmpty) {
      _processPlayedClips(playedClips);
    }
  
    // Remove failed clips and replace them
    final failedClips = _clipBuffer.where((clip) => clip.hasFailed && !clip.canRetry).toList();
    for (final clip in failedClips) {
      _clipBuffer.remove(clip);
      final newClip = VideoClip(
        prompt: "${video.title}\n${video.description}",
        seed: video.useFixedSeed && video.seed > 0 ? video.seed : generateSeed(),
        orientation: _currentOrientation,
      );
      _clipBuffer.add(newClip);
    }

    // Clean up stuck generations
    _generationHandler.checkForStuckGenerations(_clipBuffer);

    // Get pending clips that aren't being generated
    final pendingClips = _clipBuffer
        .where((clip) => clip.isPending && !_activeGenerations.contains(clip.seed.toString()))
        .toList();

    // Calculate available generation slots
    final availableSlots = Configuration.instance.renderQueueMaxConcurrentGenerations - _activeGenerations.length;

    if (availableSlots > 0 && pendingClips.isNotEmpty) {
      final clipsToGenerate = pendingClips.take(availableSlots).toList();
      ClipQueueConstants.logEvent('Starting ${clipsToGenerate.length} parallel generations');

      final generationFutures = clipsToGenerate.map((clip) => 
        _generationHandler.generateClip(clip, video).catchError((e) {
          debugPrint('Generation failed for clip ${clip.seed}: $e');
          return null;
        })
      ).toList();

      ClipQueueConstants.unawaited(
        Future.wait(generationFutures, eagerError: false).then((_) {
          if (!_isDisposed) {
            onQueueUpdated?.call();
            // Recursively ensure buffer stays full
            _fillBuffer();
          }
        })
      );
    }

    onQueueUpdated?.call();

    _logger.logStateChange(
      'fillBuffer:complete',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
  }

  /// Reorder the buffer by priority
  void _reorderBufferByPriority() {
    // First, extract all clips that aren't played
    final activeClips = _clipBuffer.where((c) => !c.hasPlayed).toList();
    
    // Sort clips by priority:
    // 1. Currently playing clips stay at their position
    // 2. Ready clips move to the front (right after playing clips)
    // 3. In-progress generations
    // 4. Pending generations
    // 5. Failed generations
    activeClips.sort((a, b) {
      // Helper function to get priority value for a state
      int getPriority(ClipState state) {
        switch (state) {
          case ClipState.generatedAndPlaying:
            return 0;
          case ClipState.generatedAndReadyToPlay:
            return 1;
          case ClipState.generationInProgress:
            return 2;
          case ClipState.generationPending:
            return 3;
          case ClipState.failedToGenerate:
            return 4;
          case ClipState.generatedAndPlayed:
            return 5;
        }
      }

      // Compare priorities
      final priorityA = getPriority(a.state);
      final priorityB = getPriority(b.state);
      
      if (priorityA != priorityB) {
        return priorityA.compareTo(priorityB);
      }
      
      // If same priority, maintain relative order by keeping original indices
      return _clipBuffer.indexOf(a).compareTo(_clipBuffer.indexOf(b));
    });

    // Clear and refill the buffer with the sorted clips
    _clipBuffer.clear();
    _clipBuffer.addAll(activeClips);
  }

  /// Process clips that have been played
  void _processPlayedClips(List<VideoClip> playedClips) {
    for (final clip in playedClips) {
      _clipBuffer.remove(clip);
      _clipHistory.add(clip);
      
      // Determine which description to use for the prompt
      String descriptionToUse = video.description;
      
      // If we have an evolved description, use that instead
      if (video.evolvedDescription.isNotEmpty) {
        descriptionToUse = video.evolvedDescription;
        ClipQueueConstants.logEvent('Using evolved description for new clip (evolution #$_evolutionCounter)');
      }
      
      // Add a new pending clip with current orientation
      final newClip = VideoClip(
        prompt: "${video.title}\n${descriptionToUse}",
        seed: video.useFixedSeed && video.seed > 0 ? video.seed : generateSeed(),
        orientation: _currentOrientation,
      );
      _clipBuffer.add(newClip);
      ClipQueueConstants.logEvent('Replaced played clip ${clip.seed} with new clip ${newClip.seed} using orientation ${_currentOrientation.name}');
    }
    
    // Immediately trigger buffer fill to start generating new clips
    _fillBuffer();
  }

  /// Mark the current playing clip as played
  void markCurrentClipAsPlayed() {
    _logger.logStateChange(
      'markAsPlayed:start',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
    final playingClip = _clipBuffer.firstWhereOrNull((c) => c.isPlaying);
    if (playingClip != null) {
      playingClip.finishPlaying();
      
      _reorderBufferByPriority();
      _fillBuffer();
      onQueueUpdated?.call();
    }
    _logger.logStateChange(
      'markAsPlayed:complete',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
  }

  /// Start playing a specific clip
  void startPlayingClip(VideoClip clip) {
    _logger.logStateChange(
      'startPlaying:start',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
    if (clip.isReady) {
      clip.startPlaying();
      onQueueUpdated?.call();
    }
    _logger.logStateChange(
      'startPlaying:complete',
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
      isDisposed: _isDisposed,
    );
  }

  /// Manually fill the buffer
  void fillBuffer() {
    ClipQueueConstants.logEvent('Manual buffer fill requested');
    _fillBuffer();
  }
  
  /// Handle orientation change
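  ///
  /// For example (assuming the [VideoOrientation] enum also defines a
  /// `PORTRAIT` value alongside `LANDSCAPE`):
  ///
  /// ```dart
  /// await manager.updateOrientation(VideoOrientation.PORTRAIT);
  /// ```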
  Future<void> updateOrientation(VideoOrientation newOrientation) async {
    if (_currentOrientation == newOrientation) {
      ClipQueueConstants.logEvent('Orientation unchanged: ${newOrientation.name}');
      return;
    }
    
    ClipQueueConstants.logEvent('Orientation changed from ${_currentOrientation.name} to ${newOrientation.name}');
    _currentOrientation = newOrientation;
    
    // Stop tracking any in-flight generations so new ones can start
    // with the updated orientation
    _activeGenerations.clear();
    
    // Clear buffer and history
    _clipBuffer.clear();
    _clipHistory.clear();
    
    // Re-initialize the queue with the new orientation
    await initialize(orientation: newOrientation);
    
    // Notify listeners
    onQueueUpdated?.call();
  }
  
  /// Set the simulation pause state based on video playback
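  ///
  /// A sketch of the expected wiring (the player callbacks are assumptions
  /// about the calling code, not part of this class):
  ///
  /// ```dart
  /// // when the video player pauses:
  /// manager.setSimulationPaused(true);
  /// // when the video player resumes:
  /// manager.setSimulationPaused(false);
  /// ```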
  void setSimulationPaused(bool isPaused) {
    if (_isSimulationPaused == isPaused) return;
    
    _isSimulationPaused = isPaused;
    ClipQueueConstants.logEvent(
      isPaused 
        ? 'Simulation paused (video playback paused)' 
        : 'Simulation resumed (video playback resumed)'
    );
    
    // If we're resuming after a pause, update the last evolution time
    // to avoid immediate evolution after resuming
    if (!isPaused) {
      _lastDescriptionEvolutionTime = DateTime.now();
    }
  }

  /// Print the current state of the queue
  void printQueueState() {
    _logger.printQueueState(
      clipBuffer: _clipBuffer,
      activeGenerations: _activeGenerations,
      clipHistory: _clipHistory,
    );
  }

  /// Get statistics for the buffer
  Map<String, dynamic> getBufferStats() {
    return _logger.getBufferStats(
      clipBuffer: _clipBuffer,
      clipHistory: _clipHistory,
      activeGenerations: _activeGenerations,
    );
  }

  /// Dispose the manager and clean up resources
  Future<void> dispose() async {
    debugPrint('ClipQueueManager: Starting disposal for video $videoId');
    _isDisposed = true;
    _generationHandler.isDisposed = true;

    // Cancel all timers first
    _bufferCheckTimer?.cancel();
    _descriptionEvolutionTimer?.cancel();
    
    // Complete any pending generation completers
    for (var clip in _clipBuffer) {
      clip.retryTimer?.cancel();
      
      if (clip.isGenerating && 
          clip.generationCompleter != null && 
          !clip.generationCompleter!.isCompleted) {
        // Don't throw an error, just complete normally
        clip.generationCompleter!.complete();
      }
    }

    // Cancel any pending requests for this video
    if (videoId.isNotEmpty) {
      _websocketService.cancelRequestsForVideo(videoId);
    }

    // Clear all collections
    _clipBuffer.clear();
    _clipHistory.clear();
    _activeGenerations.clear();

    debugPrint('ClipQueueManager: Completed disposal for video $videoId');
  }
}