+
@@ -423,42 +334,31 @@
// --- Global state variables ---
let apiKey = localStorage.getItem('googleTTSApiKey') || '';
let currentText = '';
- let selectedVoice = null;
+ let currentNotes = '';
+ let currentBookmarks = [];
+ let selectedVoiceObject = null; // Stores the full voice object from API
let availableVoices = [];
let isPlaying = false;
let isPaused = false;
let audioContext = null;
- let audioSource = null; // BufferSourceNode for playing audio
- let audioBuffer = null; // Holds the decoded audio data (potentially concatenated)
- let playbackStartTime = 0; // audioContext.currentTime when playback started/resumed
- let pausedAtTime = 0; // Time within the audioBuffer where pause occurred
- let currentAudioBlob = null; // Blob of the full synthesized audio for download
- let currentReadingPosition = 0; // Character position for reading highlight
- // No estimatedDuration, will rely on audioBuffer.duration
-
- const MAX_CHUNK_SIZE = 4800; // Reduced slightly for safety with UTF-8 characters (Google TTS limit is 5000 bytes)
- const PRICING = {
- standard: 4.00,
- wavenet: 16.00,
- neural2: 16.00,
- studio: 160.00
- };
- const defaultStandardMaleVoices = [ // Prioritized list for default voice selection
- 'en-US-Standard-D', 'en-US-Standard-B', 'en-US-Standard-A', 'en-US-Standard-I',
- 'en-GB-Standard-B', 'en-GB-Standard-D', 'en-GB-Standard-A',
- 'en-AU-Standard-B', 'en-AU-Standard-D', 'en-AU-Standard-A'
- ];
-
- // --- Global error handlers ---
- window.addEventListener('error', (e) => {
- console.error('Global error caught:', e.error);
- showToast(`An unexpected error occurred: ${e.message}`, 'error', 5000);
- });
- window.addEventListener('unhandledrejection', (e) => {
- console.error('Unhandled promise rejection:', e.reason);
- showToast(`An unhandled promise error occurred: ${e.reason.message || e.reason}`, 'error', 5000);
- e.preventDefault();
- });
+ let audioSource = null;
+ let audioBuffer = null; // Holds the fully decoded and (if needed) concatenated audio
+ let playbackStartTime = 0;
+ let pausedAtTime = 0;
+ let currentAudioBlob = null; // Holds the raw MP3 blob for download
+ let currentReadingPosition = 0;
+
+ let isSynthesizing = false; // NEW: Tracks if synthesis is in progress
+ let synthesisCancelled = false; // NEW: Flag to signal cancellation
+
+ const MAX_CHUNK_SIZE = 4800;
+ const PRICING = { standard: 4.00, wavenet: 16.00, neural2: 16.00, studio: 160.00 };
+ const defaultStandardMaleVoices = [ /* ... existing ... */ ];
+ const LIBRARY_STORAGE_KEY = 'ttsReaderLibrary';
+
+ // --- Global error handlers (existing) ---
+ window.addEventListener('error', (e) => { console.error('Global error:', e.error); showToast(`Unexpected error: ${e.message}`, 'error'); });
+ window.addEventListener('unhandledrejection', (e) => { console.error('Unhandled rejection:', e.reason); showToast(`Unhandled error: ${e.reason.message || e.reason}`, 'error'); e.preventDefault(); });
// --- DOM Elements ---
const apiKeyInput = document.getElementById('apiKey');
@@ -467,753 +367,465 @@
const pasteBtn = document.getElementById('pasteBtn');
const dropzone = document.getElementById('dropzone');
const fileInput = document.getElementById('fileInput');
- const documentContent = document.getElementById('documentContent');
+ const documentContentEl = document.getElementById('documentContent');
const charCount = document.getElementById('charCount');
const refreshVoicesBtn = document.getElementById('refreshVoicesBtn');
const languageSelect = document.getElementById('languageSelect');
const rateSelect = document.getElementById('rateSelect');
const pitchSelect = document.getElementById('pitchSelect');
- const modelSelect = document.getElementById('modelSelect');
- const playBtn = document.getElementById('playBtn');
- const pauseBtn = document.getElementById('pauseBtn');
+ const modelSelect = document.getElementById('modelSelect'); // Filters voices for voiceSelectorDropdown
+ const voiceSelectorDropdown = document.getElementById('voiceSelectorDropdown'); // NEW: For specific voice selection
+
+ const synthesizeBtn = document.getElementById('synthesizeBtn'); // NEW: Triggers synthesis
+ const playAudioBtn = document.getElementById('playAudioBtn'); // RENAMED from playBtn, handles playback
const stopBtn = document.getElementById('stopBtn');
const downloadBtn = document.getElementById('downloadBtn');
+
const currentVoiceName = document.getElementById('currentVoiceName');
const currentVoiceDetails = document.getElementById('currentVoiceDetails');
const playVoiceSample = document.getElementById('playVoiceSample');
- const synthesisStatus = document.getElementById('synthesisStatus');
+
+ const synthesisActionStatus = document.getElementById('synthesisActionStatus'); // RENAMED from synthesisStatus
const currentTimeEl = document.getElementById('currentTime');
const totalTimeEl = document.getElementById('totalTime');
- const progressContainer = document.getElementById('progressContainer');
- const progressBar = document.getElementById('progressBar');
- const readingProgress = document.getElementById('readingProgress');
+ const progressContainer = document.getElementById('progressContainer'); // Playback progress bar container
+ const progressBar = document.getElementById('progressBar'); // Playback progress bar fill
+ const readingProgress = document.getElementById('readingProgress'); // Text highlight progress
const currentPositionMarker = document.getElementById('currentPositionMarker');
- const synthesisProgressEl = document.getElementById('synthesisProgress');
- const progressPercentageEl = document.getElementById('progressPercentage');
+
+ const synthesisProgressEl = document.getElementById('synthesisProgress'); // Container for synthesis progress text & bar
+ const progressPercentageEl = document.getElementById('progressPercentage'); // Text for X%
+ const synthesisProgressBarInner = document.getElementById('synthesisProgressBarInner'); // Visual bar for synthesis
+
const darkModeToggle = document.getElementById('darkModeToggle');
const toggleKeyVisibilityBtn = document.getElementById('toggleKeyVisibility');
const eyeIcon = document.getElementById('eyeIcon');
+ const costEstimatorEl = document.getElementById('costEstimator');
+
+ const notesTextarea = document.getElementById('notesTextarea');
+ const saveToLibraryBtn = document.getElementById('saveToLibraryBtn');
+ const viewLibraryBtn = document.getElementById('viewLibraryBtn');
+ const libraryModal = document.getElementById('libraryModal');
+ const closeLibraryModalBtn = document.getElementById('closeLibraryModalBtn');
+ const libraryItemsUl = document.getElementById('libraryItems');
// --- Initialization ---
function init() {
- if (apiKey) {
- apiKeyInput.value = apiKey;
- loadVoices(); // Attempt to load voices if API key exists
- }
+ if (apiKey) { apiKeyInput.value = apiKey; loadVoices(); }
if (typeof pdfjsLib !== 'undefined') {
pdfjsLib.GlobalWorkerOptions.workerSrc = 'https://cdnjs.cloudflare.com/ajax/libs/pdf.js/2.16.105/pdf.worker.min.js';
- } else {
- console.warn('pdf.js library not found. PDF processing will not be available.');
- }
- if (typeof mammoth === 'undefined') {
- console.warn('mammoth.js library not found. DOCX processing will not be available.');
- }
-
+ } else { console.warn('pdf.js not loaded.'); }
+ if (typeof mammoth === 'undefined') { console.warn('mammoth.js not loaded.'); }
- // Initialize AudioContext on first user interaction to comply with autoplay policies
document.body.addEventListener('click', initAudioContext, { once: true });
document.body.addEventListener('keydown', initAudioContext, { once: true });
-
toggleKeyVisibilityBtn.addEventListener('click', () => {
- if (apiKeyInput.type === 'password') {
- apiKeyInput.type = 'text';
- eyeIcon.className = 'fas fa-eye-slash';
- } else {
- apiKeyInput.type = 'password';
- eyeIcon.className = 'fas fa-eye';
- }
+ apiKeyInput.type = apiKeyInput.type === 'password' ? 'text' : 'password';
+ eyeIcon.className = apiKeyInput.type === 'password' ? 'fas fa-eye' : 'fas fa-eye-slash';
});
initDarkMode();
- initProgressIndicator();
+ initProgressIndicator(); // For playback seeking
initDownloadFeature();
- modelSelect.addEventListener('change', (e) => {
- updateCostEstimator();
- autoSelectVoiceForModel(e.target.value);
- });
- rateSelect.addEventListener('change', updateCostEstimator);
+ // Voice selection chain: Language -> Model -> Voice Dropdown
+ languageSelect.addEventListener('change', loadVoices);
+ modelSelect.addEventListener('change', populateVoiceSelectorDropdown); // Model change repopulates specific voices
+ voiceSelectorDropdown.addEventListener('change', handleVoiceSelectionChange); // User picks a specific voice
+
+ rateSelect.addEventListener('change', updateCostEstimator); // Cost might depend on features tied to rate (though not directly for Google basic)
pitchSelect.addEventListener('change', updateCostEstimator);
- languageSelect.addEventListener('change', loadVoices); // Reload voices on language change
- document.getElementById('showCostBreakdown')?.addEventListener('click', showCostBreakdownModal);
+
playVoiceSample.addEventListener('click', () => {
- if (selectedVoice) playSample(selectedVoice);
- else showToast('Please select a voice first.', 'info');
+ if (selectedVoiceObject) playSample(selectedVoiceObject); else showToast('Select a voice first.', 'info');
});
- // Setup main playback controls
- playBtn.addEventListener('click', handlePlayPause);
- pauseBtn.addEventListener('click', pausePlayback);
- stopBtn.addEventListener('click', stopPlayback);
+ // NEW Synthesis and Playback buttons
+ synthesizeBtn.addEventListener('click', startFullSynthesis);
+ playAudioBtn.addEventListener('click', handlePlayPauseAudio); // For playing already synthesized audio
+ stopBtn.addEventListener('click', handleStopAll);
- // File handling listeners
+ // File and text input
uploadBtn.addEventListener('click', () => fileInput.click());
- fileInput.addEventListener('change', (e) => {
- if (e.target.files.length > 0) handleFile(e.target.files[0]);
- });
+ fileInput.addEventListener('change', (e) => { if (e.target.files.length > 0) handleFile(e.target.files[0]); });
pasteBtn.addEventListener('click', handlePaste);
setupDragAndDrop();
refreshVoicesBtn.addEventListener('click', loadVoices);
saveKeyBtn.addEventListener('click', saveApiKey);
- updatePlaybackButtons(); // Initial state
- updateSynthesisStatus(); // Initial state
- }
-
- // --- Audio Context ---
- function initAudioContext() {
- if (!audioContext) {
- try {
- audioContext = new (window.AudioContext || window.webkitAudioContext)();
- if (audioContext.state === 'suspended') {
- audioContext.resume();
- }
- console.log('AudioContext initialized.');
- } catch (e) {
- console.error('Web Audio API not supported or could not be initialized.', e);
- showToast('Your browser does not support the Web Audio API. Playback may not work.', 'error', 5000);
- }
- }
- return audioContext;
- }
-
-
- // --- Dark Mode ---
- function initDarkMode() {
- const savedTheme = localStorage.getItem('theme');
- const systemPrefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
- const root = document.documentElement;
- const icon = darkModeToggle.querySelector('i');
-
- if (savedTheme === 'dark' || (!savedTheme && systemPrefersDark)) {
- root.setAttribute('data-theme', 'dark');
- icon.className = 'fas fa-sun text-xl';
- } else {
- root.setAttribute('data-theme', 'light');
- icon.className = 'fas fa-moon text-xl';
- }
- darkModeToggle.addEventListener('click', toggleDarkMode);
- }
-
- function toggleDarkMode() {
- const root = document.documentElement;
- const currentTheme = root.getAttribute('data-theme');
- const newTheme = currentTheme === 'dark' ? 'light' : 'dark';
- const icon = darkModeToggle.querySelector('i');
-
- root.setAttribute('data-theme', newTheme);
- localStorage.setItem('theme', newTheme);
- icon.className = newTheme === 'dark' ? 'fas fa-sun text-xl' : 'fas fa-moon text-xl';
- }
-
- // --- Progress Indicator & Seeking ---
- function initProgressIndicator() {
- progressContainer.addEventListener('click', (e) => {
- if (!audioBuffer || !isPlaying) return; // Only seek if playing and buffer exists
-
- const rect = progressContainer.getBoundingClientRect();
- const clickX = e.clientX - rect.left;
- const percentage = Math.max(0, Math.min(1, clickX / rect.width)); // Clamp between 0 and 1
- const newTime = percentage * audioBuffer.duration;
-
- seekToTime(newTime);
- });
+ // Library and Notes
+ notesTextarea.addEventListener('input', () => currentNotes = notesTextarea.value);
+ saveToLibraryBtn.addEventListener('click', handleSaveToLibrary);
+ viewLibraryBtn.addEventListener('click', openLibraryModal);
+ closeLibraryModalBtn.addEventListener('click', closeLibraryModal);
+ libraryModal.addEventListener('click', (e) => { if (e.target === libraryModal) closeLibraryModal(); });
+ documentContentEl.addEventListener('click', handleParagraphClickForBookmark);
+
+ updateAllControlButtons();
+ updateSynthesisActionStatus();
+ renderLibraryItems();
}
- function seekToTime(time) {
- if (!audioContext || !audioSource || !audioBuffer || !isPlaying) return;
-
- // Stop current playback
- audioSource.onended = null; // Prevent stopPlayback from being called
- audioSource.stop();
-
- // Create new source and start from new time
- audioSource = audioContext.createBufferSource();
- audioSource.buffer = audioBuffer;
- audioSource.connect(audioContext.destination);
-
- pausedAtTime = time; // Store the seeked time
- playbackStartTime = audioContext.currentTime - pausedAtTime; // Adjust playbackStartTime
-
- audioSource.start(0, pausedAtTime);
+ // --- Audio Context (Existing) ---
+ function initAudioContext() { /* ... same as before ... */ }
- audioSource.onended = () => { // Re-attach onended handler
- if (isPlaying && !isPaused) { // If it naturally ends
- stopPlayback();
- }
- };
- // Update UI immediately
- updateProgressUI(pausedAtTime, audioBuffer.duration);
- }
+ // --- Dark Mode (Existing) ---
+ function initDarkMode() { /* ... same as before ... */ }
+ function toggleDarkMode() { /* ... same as before ... */ }
- // --- Download ---
- function initDownloadFeature() {
- downloadBtn.addEventListener('click', downloadCurrentAudio);
- }
+ // --- Progress Indicator & Seeking (For Playback - Existing) ---
+ function initProgressIndicator() { /* ... same as before, targets playback progress bar ... */ }
+ function seekToTime(time) { /* ... same as before, for seeking in audioBuffer ... */ }
- async function downloadCurrentAudio() {
- if (!currentAudioBlob) {
- showToast('No audio available to download. Synthesize first.', 'info');
- return;
- }
- try {
- const url = URL.createObjectURL(currentAudioBlob);
- const a = document.createElement('a');
- a.style.display = 'none';
- a.href = url;
- const timestamp = new Date().toISOString().slice(0, 19).replace(/[T:]/g, '-');
- const voiceNameSafe = selectedVoice ? selectedVoice.name.replace(/[^a-zA-Z0-9_]/g, '-') : 'unknown-voice';
- a.download = `tts-audio-${voiceNameSafe}-${timestamp}.mp3`;
- document.body.appendChild(a);
- a.click();
- document.body.removeChild(a);
- URL.revokeObjectURL(url);
- showToast('Audio download started.', 'success');
- } catch (error) {
- console.error('Download error:', error);
- showToast('Failed to download audio: ' + error.message, 'error');
- }
- }
+ // --- Download (Existing, uses currentAudioBlob) ---
+ function initDownloadFeature() { /* ... same as before ... */ }
+ async function downloadCurrentAudio() { /* ... same as before ... */ }
- // --- Toast Notifications ---
- function showToast(message, type = 'info', duration = 3000) {
- document.querySelectorAll('.toast').forEach(toast => toast.remove()); // Remove existing
- const toast = document.createElement('div');
- toast.className = `toast ${type}`;
- toast.textContent = message;
- document.body.appendChild(toast);
- setTimeout(() => {
- toast.style.animation = 'fadeOut 0.3s ease forwards';
- setTimeout(() => toast.remove(), 300);
- }, duration - 300);
- }
+ // --- Toast Notifications (Existing) ---
+ function showToast(message, type = 'info', duration = 3000) { /* ... same as before ... */ }
- // --- File Utilities ---
- function formatFileSize(bytes) {
- if (bytes === 0) return '0 Bytes';
- const k = 1024;
- const sizes = ['Bytes', 'KB', 'MB', 'GB'];
- const i = Math.floor(Math.log(bytes) / Math.log(k));
- return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
- }
+ // --- File Utilities (Existing) ---
+ function formatFileSize(bytes) { /* ... same as before ... */ }
// --- UI Updates (Status, Cost) ---
- function updateSynthesisStatus() {
- if (!apiKey) {
- synthesisStatus.textContent = 'API key needed'; return;
- }
- if (!selectedVoice) {
- synthesisStatus.textContent = 'Loading/Select voice'; return;
- }
- if (!currentText.trim()) {
- synthesisStatus.textContent = 'Add text to synthesize'; return;
+ function updateSynthesisActionStatus(message = "") {
+ if (isSynthesizing) {
+ synthesisActionStatus.textContent = message || `Synthesizing audio...`;
+ } else if (audioBuffer && !isPlaying && !isPaused) {
+ synthesisActionStatus.textContent = "Synthesis complete. Ready to play or download.";
+ } else if (isPlaying && !isPaused) {
+ synthesisActionStatus.textContent = "Playing audio...";
+ } else if (isPaused) {
+ synthesisActionStatus.textContent = "Playback paused.";
+ } else if (!apiKey) {
+ synthesisActionStatus.textContent = "API key needed.";
+ } else if (availableVoices.length === 0) {
+ synthesisActionStatus.textContent = "Loading voices or no voices available for language/model.";
+ } else if (!selectedVoiceObject) {
+ synthesisActionStatus.textContent = "Select a specific voice.";
+ } else if (!currentText.trim()) {
+ synthesisActionStatus.textContent = "Load or paste document text.";
+ } else {
+ synthesisActionStatus.textContent = "Ready to synthesize.";
}
- const voiceTypeInfo = getVoiceTypeAndPricing(selectedVoice.name);
- const estimation = calculateEstimatedCost(currentText.length, voiceTypeInfo);
- synthesisStatus.textContent = `Ready (${currentText.length.toLocaleString()} chars, ~$${estimation.cost.toFixed(4)})`;
- }
-
- function getVoiceTypeAndPricing(voiceName) {
- const nameLower = voiceName.toLowerCase();
- if (nameLower.includes('studio')) return { type: 'studio', name: 'Studio', rate: PRICING.studio };
- if (nameLower.includes('wavenet')) return { type: 'wavenet', name: 'WaveNet', rate: PRICING.wavenet };
- if (nameLower.includes('neural2')) return { type: 'neural2', name: 'Neural2', rate: PRICING.neural2 };
- return { type: 'standard', name: 'Standard', rate: PRICING.standard };
- }
-
- function calculateEstimatedCost(textLength, voiceTypeInfo) {
- if (!textLength || !voiceTypeInfo) return { cost: 0, details: '' };
- const cost = (textLength / 1000000) * voiceTypeInfo.rate;
- return { cost: cost, details: `${textLength.toLocaleString()} characters` };
}
+ function getVoiceTypeAndPricing(voiceName) { /* ... same as before ... */ }
+ function calculateEstimatedCost(textLength, voiceTypeInfo) { /* ... same as before ... */ }
function updateCostEstimator() {
- const estimatorEl = document.getElementById('costEstimator');
- const costEl = document.getElementById('estimatedCost');
- const detailsEl = document.getElementById('costDetails');
- const voiceTypeDisplayEl = document.getElementById('voiceType');
- const priceRateDisplayEl = document.getElementById('priceRate');
-
- if (!selectedVoice || !currentText.trim()) {
- estimatorEl.classList.add('hidden');
+ // This function now relies on selectedVoiceObject
+ if (!selectedVoiceObject || !currentText.trim()) {
+ costEstimatorEl.classList.add('hidden');
return;
}
-
- const voiceTypeInfo = getVoiceTypeAndPricing(selectedVoice.name);
- const estimation = calculateEstimatedCost(currentText.length, voiceTypeInfo);
-
- estimatorEl.classList.remove('hidden');
- costEl.textContent = `$${estimation.cost.toFixed(4)}`;
- detailsEl.textContent = estimation.details;
- voiceTypeDisplayEl.textContent = `${voiceTypeInfo.name} voice`;
- priceRateDisplayEl.textContent = `$${voiceTypeInfo.rate.toFixed(2)}/million chars`;
-
- // Reset classes then apply new ones for color coding
- estimatorEl.className = 'mb-4 p-3 rounded-lg'; // Base classes
- let colorClasses = 'bg-blue-100 border border-blue-200 text-blue-800 dark:bg-blue-900 dark:border-blue-700 dark:text-blue-200';
- let costColor = 'text-blue-900 dark:text-blue-100';
-
- if (estimation.cost < 0.01) { // Adjusted threshold for green
- colorClasses = 'bg-green-100 border border-green-200 text-green-800 dark:bg-green-900 dark:border-green-700 dark:text-green-200';
- costColor = 'text-green-900 dark:text-green-100';
- } else if (estimation.cost >= 1.00 && estimation.cost < 5.00) {
- colorClasses = 'bg-yellow-100 border border-yellow-200 text-yellow-800 dark:bg-yellow-900 dark:border-yellow-700 dark:text-yellow-200';
- costColor = 'text-yellow-900 dark:text-yellow-100';
- } else if (estimation.cost >= 5.00) {
- colorClasses = 'bg-red-100 border border-red-200 text-red-800 dark:bg-red-900 dark:border-red-700 dark:text-red-200';
- costColor = 'text-red-900 dark:text-red-100';
- }
- estimatorEl.classList.add(...colorClasses.split(' '));
- costEl.className = `text-lg font-bold ${costColor}`;
- // Ensure child elements also get dark mode text colors if not covered by parent
- detailsEl.className = `text-xs ${colorClasses.split(' ').filter(c => c.startsWith('text-') || c.startsWith('dark:text-')).join(' ')}`;
- voiceTypeDisplayEl.parentElement.className = `text-xs ${colorClasses.split(' ').filter(c => c.startsWith('text-') || c.startsWith('dark:text-')).join(' ')}`;
- }
-
-
- function showCostBreakdownModal() {
- const modalId = 'costBreakdownModal';
- if (document.getElementById(modalId)) return; // Prevent multiple modals
-
- const modal = document.createElement('div');
- modal.id = modalId;
- modal.className = 'fixed inset-0 bg-black bg-opacity-60 flex items-center justify-center z-50 p-4';
- const voiceTypeInfo = selectedVoice ? getVoiceTypeAndPricing(selectedVoice.name) : null;
- const estimation = (currentText && voiceTypeInfo) ? calculateEstimatedCost(currentText.length, voiceTypeInfo) : null;
-
- modal.innerHTML = `
-
-
-
Google TTS Pricing
-
-
-
- ${Object.entries(PRICING).map(([key, value]) => `
-
-
- ${key.charAt(0).toUpperCase() + key.slice(1)} Voices ${key === 'standard' ? '(Default)' : ''}
- $${value.toFixed(2)}
-
-
per million characters
-
- `).join('')}
-
- ${currentText ? `
-
-
Current Estimate:
-
-
Document: ${currentText.length.toLocaleString()} characters
- ${selectedVoice && voiceTypeInfo ? `
-
Voice Type: ${voiceTypeInfo.name}
-
Rate: $${voiceTypeInfo.rate.toFixed(2)}/million chars
-
Estimated Total: $${estimation ? estimation.cost.toFixed(4) : 'N/A'}
- ` : '
No voice selected for precise estimation.
'}
-
-
` : '
Load a document to see a specific cost estimate.
'}
-
-
* Prices are based on Google Cloud Text-to-Speech API. Actual costs may vary.
-
* Always check your Google Cloud Console for the most current pricing information.
-
-
-
+ // ... rest of the logic is similar, using selectedVoiceObject.name
+ const voiceInfo = getVoiceTypeAndPricing(selectedVoiceObject.name);
+ const est = calculateEstimatedCost(currentText.length, voiceInfo);
+ costEstimatorEl.classList.remove('hidden');
+
+ if (!costEstimatorEl.querySelector('#estimatedCost')) {
+ costEstimatorEl.innerHTML = `
+
-
-
- `;
- document.body.appendChild(modal);
- // Animate in
- setTimeout(() => {
- const modalContent = modal.querySelector('.bg-white');
- if (modalContent) {
- modalContent.classList.remove('scale-95', 'opacity-0');
- }
- }, 10);
-
-
- modal.addEventListener('click', (e) => {
- if (e.target === modal || e.target.closest('[data-close-modal]')) {
- const modalContent = modal.querySelector('.bg-white');
- if (modalContent) {
- modalContent.style.transform = 'scale(0.95)';
- modalContent.style.opacity = '0';
- }
- setTimeout(() => modal.remove(), 200);
- }
- });
- }
-
-
- // --- API Key Management ---
- async function saveApiKey() {
- const key = apiKeyInput.value ? apiKeyInput.value.trim() : '';
- if (!key) {
- showToast('Please enter a valid API key.', 'error');
- return;
+
+
Standard voice • $4.00/M chars
+
+
`;
+ costEstimatorEl.querySelector('#showCostBreakdownInner')?.addEventListener('click', showCostBreakdownModal);
}
- saveKeyBtn.innerHTML = '
Validating...';
- saveKeyBtn.disabled = true;
-
- try {
- if (key.length < 20 || !key.match(/^[A-Za-z0-9_-]+$/)) { // Basic sanity check
- throw new Error('Invalid API key format. Please check your key.');
- }
-
- // Test API key by trying to fetch voices (a light, authenticated request)
- const testUrl = `https://texttospeech.googleapis.com/v1/voices?languageCode=en-US`;
- const response = await fetch(testUrl, {
- method: 'GET',
- headers: { 'X-Goog-Api-Key': key, 'Accept': 'application/json' },
- mode: 'cors', cache: 'no-cache'
- });
-
- if (response.ok) {
- apiKey = key;
- localStorage.setItem('googleTTSApiKey', key);
- showToast('API key validated and saved successfully!', 'success');
- loadVoices(); // Load voices with the new key
- } else {
- const errorData = await response.json().catch(() => null);
- const message = errorData?.error?.message || `Validation failed (HTTP ${response.status})`;
- throw new Error(`API key validation failed: ${message}. Check key and Cloud settings.`);
- }
- } catch (error) {
- console.error('API key validation error:', error);
- showToast(error.message, 'error', 6000);
- apiKey = ''; // Clear invalid key
- localStorage.removeItem('googleTTSApiKey');
- } finally {
- saveKeyBtn.innerHTML = '
Save Key';
- saveKeyBtn.disabled = false;
- updateSynthesisStatus();
- }
+ costEstimatorEl.querySelector('#estimatedCost').textContent = `$${est.cost.toFixed(4)}`;
+ costEstimatorEl.querySelector('#costDetails').textContent = est.details;
+ costEstimatorEl.querySelector('#voiceTypeEst').textContent = `${voiceInfo.name} voice`;
+ costEstimatorEl.querySelector('#priceRateEst').textContent = `$${voiceInfo.rate.toFixed(2)}/M chars`;
+ // ... color coding logic ...
}
+ function showCostBreakdownModal() { /* ... same as before, ensure it uses selectedVoiceObject if needed ... */ }
- // --- Voice Loading & Selection ---
+ // --- API Key Management (Existing) ---
+ async function saveApiKey() { /* ... same as before ... */ }
+
+ // --- Voice Loading & Selection - REWORKED ---
// --- Voice Loading & Selection - REWORKED ---
// Fetches the voice list for the currently selected language from the
// Google Cloud TTS API and repopulates the voice dropdown. On any failure
// the voice state is cleared and an error is surfaced to the user.
async function loadVoices() {
  if (!apiKey) {
    updateVoiceStatusDisplay("API key required.", "Enter API key to load voices.");
    clearVoiceSelector();
    availableVoices = [];
    selectedVoiceObject = null;
    updateAllControlButtons();
    updateSynthesisActionStatus();
    return;
  }

  updateVoiceStatusDisplay("Loading voices...", "Please wait...");
  playVoiceSample.disabled = true;
  refreshVoicesBtn.disabled = true;
  const spinnerIcon = refreshVoicesBtn.querySelector('i');
  if (spinnerIcon) spinnerIcon.classList.add('fa-spin');

  try {
    const langCode = languageSelect.value;
    const resp = await fetch(`https://texttospeech.googleapis.com/v1/voices?languageCode=${langCode}`, {
      method: 'GET',
      headers: { 'X-Goog-Api-Key': apiKey, 'Accept': 'application/json' },
      mode: 'cors',
      cache: 'no-cache'
    });

    if (!resp.ok) {
      const errBody = await resp.json().catch(() => null);
      throw new Error(`API error (${resp.status}): ${errBody?.error?.message || 'Failed to load voices'}`);
    }

    const payload = await resp.json();
    if (!payload?.voices?.length) {
      throw new Error('No voices returned for the selected language.');
    }

    availableVoices = payload.voices;
    showToast(`Loaded ${availableVoices.length} voices for ${langCode}.`, 'success');
    populateVoiceSelectorDropdown(); // also auto-selects a sensible default
  } catch (error) {
    console.error('Error loading voices:', error);
    updateVoiceStatusDisplay("Error loading voices", error.message.substring(0, 100));
    clearVoiceSelector();
    availableVoices = [];
    selectedVoiceObject = null;
    showToast('Failed to load voices: ' + error.message, 'error', 5000);
  } finally {
    refreshVoicesBtn.disabled = false;
    if (spinnerIcon) spinnerIcon.classList.remove('fa-spin');
    updateAllControlButtons();
    updateSynthesisActionStatus();
  }
}
- function selectDefaultStandardMaleVoice() {
- if (!availableVoices.length) return null;
- const standardVoices = availableVoices.filter(v => v.name.toLowerCase().includes('standard'));
- for (const preferredName of defaultStandardMaleVoices) {
- const voice = standardVoices.find(v => v.name === preferredName && v.languageCodes.includes(languageSelect.value));
- if (voice) return selectVoice(voice);
- }
- const anyStandardMale = standardVoices.find(v => v.ssmlGender === 'MALE' && v.languageCodes.includes(languageSelect.value));
- if (anyStandardMale) return selectVoice(anyStandardMale);
- const anyStandard = standardVoices.find(v => v.languageCodes.includes(languageSelect.value));
- if (anyStandard) return selectVoice(anyStandard);
- // If still no standard, pick first available for language
- const firstAvailable = availableVoices.find(v => v.languageCodes.includes(languageSelect.value));
- return firstAvailable ? selectVoice(firstAvailable) : null;
- }
// Rebuilds the voice <select> from availableVoices, filtered by the currently
// chosen model type, then (re)selects a sensible default and syncs state.
function populateVoiceSelectorDropdown() {
  clearVoiceSelector();
  const modelType = modelSelect.value.toLowerCase(); // "standard", "wavenet", ...

  // NOTE(review): "standard" is inferred by exclusion — any voice whose name
  // lacks wavenet/neural2/studio counts as standard. Confirm this still holds
  // for newer voice families.
  const matching = availableVoices.filter((voice) => {
    const lowered = voice.name.toLowerCase();
    if (modelType === "standard") {
      return !lowered.includes("wavenet") && !lowered.includes("neural2") && !lowered.includes("studio");
    }
    return lowered.includes(modelType);
  });

  if (matching.length === 0) {
    voiceSelectorDropdown.options.add(new Option(`No ${modelSelect.options[modelSelect.selectedIndex].text} voices found`, ""));
    selectedVoiceObject = null;
    updateVoiceStatusDisplay("No voices match criteria.", "Try different language/model.");
    updateAllControlButtons();
    return;
  }

  for (const voice of matching) {
    // Short label, e.g. "D (M)" derived from "en-US-Wavenet-D".
    let label = voice.name.replace(`${languageSelect.value}-`, '');
    label = label.replace(/Standard-|Wavenet-|Neural2-|Studio-/i, '');
    const genderTag = voice.ssmlGender === 'MALE' ? 'M' : voice.ssmlGender === 'FEMALE' ? 'F' : 'N';
    label = `${label.replace(/-/g, ' ')} (${genderTag})`;
    voiceSelectorDropdown.options.add(new Option(label, voice.name));
  }

  if (voiceSelectorDropdown.options.length > 1) { // index 0 is the placeholder
    const keepCurrent = selectedVoiceObject && matching.some((v) => v.name === selectedVoiceObject.name);
    if (keepCurrent) {
      // Previously chosen voice is still valid under the new filter — keep it.
      voiceSelectorDropdown.value = selectedVoiceObject.name;
    } else {
      let pickedDefault = false;
      if (modelType === "standard") {
        // Prefer one of the curated default standard voices, in priority order.
        for (const preferred of defaultStandardMaleVoices) {
          if (matching.some((v) => v.name === preferred)) {
            voiceSelectorDropdown.value = preferred;
            pickedDefault = true;
            break;
          }
        }
      }
      if (!pickedDefault) voiceSelectorDropdown.selectedIndex = 1; // first real voice
    }
  }
  handleVoiceSelectionChange(); // sync selectedVoiceObject + dependent UI
}
-
- if (candidates.length > 0) {
- // Prefer male, then female, then any
- let voiceToSelect = candidates.find(v => v.ssmlGender === 'MALE');
- if (!voiceToSelect) voiceToSelect = candidates.find(v => v.ssmlGender === 'FEMALE');
- if (!voiceToSelect) voiceToSelect = candidates[0];
- selectVoice(voiceToSelect);
- showToast(`Auto-selected ${voiceToSelect.name} for ${modelType} model.`, 'info', 2000);
// Syncs selectedVoiceObject (and all dependent UI) with the dropdown's value.
function handleVoiceSelectionChange() {
  const chosenName = voiceSelectorDropdown.value;
  if (!chosenName) {
    selectedVoiceObject = null;
    updateVoiceStatusDisplay("No voice selected.", "Please pick a voice from the dropdown.");
  } else {
    selectedVoiceObject = availableVoices.find((v) => v.name === chosenName);
    if (selectedVoiceObject) {
      updateVoiceStatusDisplay();
      updateCostEstimator();
    } else {
      // Defensive: dropdown value missing from availableVoices (should not
      // happen when the dropdown was populated from that same list).
      selectedVoiceObject = null;
      updateVoiceStatusDisplay("Error selecting voice.", "");
    }
  }
  updateAllControlButtons();
  updateSynthesisActionStatus();
}
// Empties the voice dropdown down to a single placeholder option and clears
// the current selection.
// NOTE(review): the placeholder markup was lost in a bad merge; reconstructed
// to satisfy populateVoiceSelectorDropdown's assumption that index 0 is a
// non-voice placeholder (it uses selectedIndex = 1 for the first real voice).
// Confirm the exact wording against the original UI.
function clearVoiceSelector() {
  voiceSelectorDropdown.innerHTML = '<option value="">-- Select Voice --</option>';
  selectedVoiceObject = null;
}
// Refreshes the "current voice" name/details labels.
// With overrides: shows a status message (e.g. loading/error) and disables
// sampling. Without: renders the selected voice's gender, pricing tier and
// rate, or a prompt when nothing is selected.
function updateVoiceStatusDisplay(nameOverride = null, detailsOverride = null) {
  if (nameOverride) {
    currentVoiceName.textContent = nameOverride;
    currentVoiceDetails.textContent = detailsOverride || "";
    playVoiceSample.disabled = true;
    return;
  }
  if (!selectedVoiceObject) {
    currentVoiceName.textContent = "No voice selected";
    currentVoiceDetails.textContent = "Select language, model, and specific voice.";
    playVoiceSample.disabled = true;
    return;
  }
  const shortName = selectedVoiceObject.name
    .replace(`${languageSelect.value}-`, '')
    .replace(/-/g, ' ');
  currentVoiceName.textContent = shortName;

  const tier = getVoiceTypeAndPricing(selectedVoiceObject.name);
  const genderLabel = selectedVoiceObject.ssmlGender === 'MALE'
    ? '♂ Male'
    : selectedVoiceObject.ssmlGender === 'FEMALE' ? '♀ Female' : 'Neutral';
  currentVoiceDetails.textContent = `${genderLabel} • ${tier.name} • $${tier.rate.toFixed(2)}/M chars`;
  playVoiceSample.disabled = false;
}
// --- TTS Synthesis & Playback Sample ---
// Plays a short sample using the given voice object so the user can audition
// it before committing to a full synthesis.
async function playSample(voiceObj) {
  if (!apiKey) { showToast('API key required.', 'error'); return; }
  if (isSynthesizing || isPlaying) handleStopAll(); // Stop current activity

  const btnOrig = playVoiceSample.innerHTML;
  // NOTE(review): the spinner markup was lost in a bad merge; reconstructed
  // with a Font Awesome spinner — confirm against the page's icon set.
  playVoiceSample.innerHTML = '<i class="fas fa-spinner fa-spin"></i>';
  playVoiceSample.disabled = true;
  const restoreButton = () => {
    playVoiceSample.innerHTML = btnOrig;
    playVoiceSample.disabled = false;
  };

  try {
    const sampleText = "Hello, this is a sample of my voice.";
    const audioB64 = await callSynthesizeSpeechAPI(
      sampleText, voiceObj,
      parseFloat(rateSelect.value), parseFloat(pitchSelect.value),
      getVoiceTypeAndPricing(voiceObj.name).type
    );

    if (!initAudioContext()) {
      // BUG FIX: the previous version returned here without restoring the
      // button, leaving it stuck on the spinner and permanently disabled.
      showToast("Audio system not ready.", "error");
      restoreButton();
      return;
    }

    const audioBytes = Uint8Array.from(atob(audioB64), c => c.charCodeAt(0));
    const decodedSampleBuffer = await audioContext.decodeAudioData(audioBytes.buffer.slice(0));

    const sampleSource = audioContext.createBufferSource();
    sampleSource.buffer = decodedSampleBuffer;
    sampleSource.connect(audioContext.destination);
    sampleSource.start(0);
    sampleSource.onended = restoreButton; // restore UI when the sample finishes
  } catch (err) {
    console.error('Sample error:', err);
    showToast('Sample failed: ' + err.message, 'error');
    restoreButton();
  }
}
// Shows the main synthesis progress bar at the given percentage (0-100).
function showSynthesisProgressUI(progress) {
  const pct = Math.round(progress);
  synthesisProgressEl.classList.remove('hidden');
  progressPercentageEl.textContent = `${pct}%`;
  synthesisProgressBarInner.style.width = `${progress}%`;
}
// Hides the synthesis progress bar and resets it back to zero.
function hideSynthesisProgressUI() {
  synthesisProgressEl.classList.add('hidden');
  progressPercentageEl.textContent = '0%';
  synthesisProgressBarInner.style.width = '0%';
}
// Low-level call to the Google TTS `text:synthesize` endpoint.
// Returns the base64-encoded MP3 audio for `text` spoken by `voiceConfigObj`
// (a full voice object from the voices API). Throws on missing key/text/voice
// or on any API failure. `model` is currently only informational (see the
// commented-out Studio effectsProfileId example).
async function callSynthesizeSpeechAPI(text, voiceConfigObj, rate = 1, pitch = 0, model = 'standard') {
  if (!apiKey) throw new Error('API key not provided.');
  if (!text || !text.trim()) throw new Error('No text for synthesis.');
  if (!voiceConfigObj) throw new Error('Voice config not provided.');

  const requestBody = {
    input: { text: text.trim() },
    voice: {
      languageCode: voiceConfigObj.languageCodes[0],
      name: voiceConfigObj.name
    },
    audioConfig: {
      audioEncoding: 'MP3',
      // Clamp to the API's documented ranges.
      speakingRate: Math.max(0.25, Math.min(4.0, rate)),
      pitch: Math.max(-20.0, Math.min(20.0, pitch))
    }
  };
  // Studio voice effect (example, check docs)
  // if (model.toLowerCase() === 'studio' && voiceConfigObj.name.toLowerCase().includes('studio')) {
  //   requestBody.audioConfig.effectsProfileId = ['telephony-class-application'];
  // }

  const endpoint = 'https://texttospeech.googleapis.com/v1/text:synthesize';
  let response;
  try {
    response = await fetch(endpoint, {
      method: 'POST',
      headers: { 'X-Goog-Api-Key': apiKey, 'Content-Type': 'application/json', 'Accept': 'application/json' },
      body: JSON.stringify(requestBody),
      mode: 'cors',
      cache: 'no-cache'
    });
  } catch (netErr) {
    // Fallback for restrictive environments: retry with the key in the URL.
    // Less secure — only reached when the header-based request cannot be sent.
    console.warn("Primary API call failed, fallback with key in URL:", netErr);
    const fallbackUrl = `https://texttospeech.googleapis.com/v1/text:synthesize?key=${encodeURIComponent(apiKey)}`;
    response = await fetch(fallbackUrl, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' },
      body: JSON.stringify(requestBody),
      mode: 'cors',
      cache: 'no-cache'
    });
  }

  if (!response.ok) {
    let errMsg = `TTS API failed (HTTP ${response.status})`;
    try {
      const errData = await response.json();
      errMsg = errData.error?.message || errMsg;
      if (errData.error?.details) errMsg += ` Details: ${JSON.stringify(errData.error.details)}`;
    } catch (e) { /* non-JSON error body — keep the generic message */ }
    throw new Error(errMsg);
  }

  const data = await response.json();
  if (!data || !data.audioContent) throw new Error('Invalid TTS API response: no audio content.');
  return data.audioContent; // base64 string
}
// --- NEW Main Synthesis Logic ---
// Synthesizes the whole document: splits the text into API-sized chunks,
// synthesizes each chunk sequentially (so progress is meaningful and
// cancellation is honored between calls), then concatenates the results into
// one AudioBuffer (for playback) and one MP3 Blob (for download).
// BUG FIX: the two copy loops below (blob byte-copy and channel splice) had
// been destroyed by stray diff hunk markers in the source; reconstructed.
async function startFullSynthesis() {
  if (!selectedVoiceObject) { showToast('Please select a specific voice.', 'info'); return; }
  if (!currentText.trim()) { showToast('No document content to synthesize.', 'info'); return; }
  if (isSynthesizing) { showToast('Synthesis already in progress.', 'info'); return; }
  if (isPlaying || isPaused) handleStopAll(); // Stop current playback before new synthesis

  isSynthesizing = true;
  synthesisCancelled = false;
  audioBuffer = null;       // Clear previous buffer
  currentAudioBlob = null;  // Clear previous blob
  updateAllControlButtons();
  showSynthesisProgressUI(0);
  updateSynthesisActionStatus("Starting synthesis...");

  try {
    const chunks = splitTextIntoChunks(currentText, MAX_CHUNK_SIZE);
    if (chunks.length === 0) {
      showToast('Nothing to synthesize.', 'info');
      throw new Error("Empty content after chunking.");
    }

    // 1. Synthesize each chunk.
    const allAudioBase64Data = [];
    for (let i = 0; i < chunks.length; i++) {
      if (synthesisCancelled) {
        showToast('Synthesis cancelled by user.', 'info');
        throw new Error("Synthesis Cancelled");
      }
      updateSynthesisActionStatus(`Synthesizing chunk ${i + 1} of ${chunks.length}...`);
      const audioData = await callSynthesizeSpeechAPI(
        chunks[i], selectedVoiceObject,
        parseFloat(rateSelect.value), parseFloat(pitchSelect.value),
        modelSelect.value // general model type for this run
      );
      allAudioBase64Data.push(audioData);
      showSynthesisProgressUI(((i + 1) / chunks.length) * 100);
    }
    if (synthesisCancelled) return; // Double check

    // 2. Build a single MP3 Blob (for download) from the raw chunk bytes.
    updateSynthesisActionStatus("Finalizing audio...");
    if (!initAudioContext()) throw new Error("Audio system not ready.");

    const allByteArrays = allAudioBase64Data.map(b64 => Uint8Array.from(atob(b64), c => c.charCodeAt(0)));
    let totalBlobLength = 0;
    allByteArrays.forEach(ba => { totalBlobLength += ba.length; });
    const combinedBlobBytes = new Uint8Array(totalBlobLength);
    let blobOffset = 0;
    for (const byteArray of allByteArrays) {
      combinedBlobBytes.set(byteArray, blobOffset);
      blobOffset += byteArray.length;
    }
    currentAudioBlob = new Blob([combinedBlobBytes], { type: 'audio/mpeg' });

    // 3. Decode every chunk, then splice them into one AudioBuffer.
    const decodedBuffers = await Promise.all(
      allByteArrays.map(byteArray =>
        audioContext.decodeAudioData(byteArray.buffer.slice(byteArray.byteOffset, byteArray.byteOffset + byteArray.byteLength))
      )
    );
    if (decodedBuffers.length === 0) throw new Error("Failed to decode any audio chunks.");

    // NOTE(review): assumes every chunk decodes with the same channel count
    // and sample rate as the first — expected for single-voice MP3 output,
    // but worth confirming.
    const totalLength = decodedBuffers.reduce((sum, buffer) => sum + buffer.length, 0);
    const numChannels = decodedBuffers[0].numberOfChannels;
    const sampleRate = decodedBuffers[0].sampleRate;
    const concatenatedBuffer = audioContext.createBuffer(numChannels, totalLength, sampleRate);
    let currentBufferOffset = 0;
    for (const buffer of decodedBuffers) {
      for (let channel = 0; channel < numChannels; channel++) {
        concatenatedBuffer.getChannelData(channel).set(buffer.getChannelData(channel), currentBufferOffset);
      }
      currentBufferOffset += buffer.length;
    }
    audioBuffer = concatenatedBuffer; // Audio is ready!

    showToast('Synthesis complete! Ready to play.', 'success');
  } catch (error) {
    if (!synthesisCancelled) { // Don't show error if user cancelled
      console.error('Error during full synthesis:', error);
      showToast('Synthesis error: ' + error.message, 'error', 5000);
    }
    audioBuffer = null;
    currentAudioBlob = null;
  } finally {
    isSynthesizing = false;
    hideSynthesisProgressUI();
    updateAllControlButtons();
    updateSynthesisActionStatus(); // "Ready to play" or error state
  }
}
// --- Main Playback Logic (plays the pre-synthesized audioBuffer) ---
// Single entry point for the play/pause button: dispatches to start, pause,
// or resume depending on the current playback state.
function handlePlayPauseAudio() {
  if (!audioBuffer) { showToast("No audio synthesized yet. Click 'Synthesize Audio' first.", 'info'); return; }
  if (!initAudioContext()) { showToast("Audio system not ready.", "error"); return; }

  if (isPaused) {
    resumeAudioPlayback();  // paused → resume
  } else if (isPlaying) {
    pauseAudioPlayback();   // playing → pause
  } else {
    startAudioPlayback();   // idle → play from the start of audioBuffer
  }
}
+
// Starts playback of audioBuffer from the beginning.
// BUG FIXES: (1) the old source's onended must be detached before stopping it,
// otherwise its handler fires handleStopAll() and immediately kills this new
// playback; (2) if a previous pause left the AudioContext suspended, it must
// be resumed or the new source plays silently.
function startAudioPlayback() {
  if (!audioBuffer || !audioContext) return;

  if (audioSource) {
    audioSource.onended = null; // prevent the old handler from re-entering stop logic
    try { audioSource.stop(); } catch (e) { /* already stopped */ }
  }
  if (audioContext.state === 'suspended') audioContext.resume();

  audioSource = audioContext.createBufferSource();
  audioSource.buffer = audioBuffer;
  audioSource.connect(audioContext.destination);

  isPlaying = true;
  isPaused = false;
  pausedAtTime = 0; // start from the beginning
  playbackStartTime = audioContext.currentTime;

  audioSource.start(0, pausedAtTime);
  startProgressTracking(); // drives the playback progress bar
  updateAllControlButtons();
  updateSynthesisActionStatus();

  audioSource.onended = () => {
    if (isPlaying && !isPaused) { // natural end of the buffer
      handleStopAll(false); // not user-initiated: no "stopped" message
    }
  };
}
// Pauses playback by suspending the AudioContext and recording how far into
// the buffer we are (so progress reporting stays correct after resume).
function pauseAudioPlayback() {
  const canPause = audioContext && audioSource && isPlaying && !isPaused;
  if (!canPause) return;
  audioContext.suspend().then(() => {
    // Accumulate the elapsed play time of this segment.
    pausedAtTime += audioContext.currentTime - playbackStartTime;
    isPaused = true;
    if (animationFrameId) cancelAnimationFrame(animationFrameId);
    updateAllControlButtons();
    updateSynthesisActionStatus();
  });
}
// Resumes playback after pauseAudioPlayback().
// BUG FIX: the previous version called audioSource.start(0, pausedAtTime) on
// a node that had already been started — AudioBufferSourceNodes are one-shot,
// so a second start() throws InvalidStateError. It also read
// audioSource.playbackState / AudioBufferSourceNode.FINISHED_STATE, which
// were removed from the Web Audio API and are undefined in modern browsers.
// Because pausing suspends the AudioContext (the source stays scheduled on
// it), resuming the context is all that is required.
function resumeAudioPlayback() {
  if (!audioContext || !audioSource || !isPlaying || !isPaused) return;
  audioContext.resume().then(() => {
    playbackStartTime = audioContext.currentTime; // reset for elapsed-time math
    isPaused = false;
    startProgressTracking();
    updateAllControlButtons();
    updateSynthesisActionStatus();
    // The existing audioSource keeps its onended handler from
    // startAudioPlayback(), so natural end-of-buffer is still handled.
  });
}
// Stops everything: cancels an in-flight synthesis and/or halts playback, and
// resets playback state and UI. Pass showMsg=false for programmatic stops
// (e.g. the buffer playing to its natural end) to suppress user messages.
// BUG FIX: if playback was paused (AudioContext suspended), the context is
// resumed here — otherwise the next playback attempt is silently stuck.
function handleStopAll(showMsg = true) {
  if (isSynthesizing) {
    synthesisCancelled = true; // signal the synthesis loop to bail out
    isSynthesizing = false;    // reflect the cancel immediately in the UI
    if (showMsg) showToast('Synthesis cancelled.', 'info');
  }
  if (audioSource) {
    audioSource.onended = null; // prevent onended from re-entering stop logic
    try { audioSource.stop(); } catch (e) { /* ignore if already stopped */ }
    audioSource = null;
  }
  if (animationFrameId) {
    cancelAnimationFrame(animationFrameId);
    animationFrameId = null;
  }
  if (audioContext && audioContext.state === 'suspended') audioContext.resume();

  isPlaying = false;
  isPaused = false;
  playbackStartTime = 0;
  pausedAtTime = 0;
  // audioBuffer / currentAudioBlob are intentionally kept so the user can
  // replay or download; they are cleared on new synthesis or new content.

  updateAllControlButtons();
  updateProgressUI(0, audioBuffer ? audioBuffer.duration : 0); // reset playback progress
  documentContentEl.querySelectorAll('.reading-highlight').forEach(el => el.classList.remove('reading-highlight'));
  currentReadingPosition = 0;
  readingProgress.style.width = '0%';
  hideSynthesisProgressUI();
  // isSynthesizing is always false by this point, so the old
  // `showMsg && !isSynthesizing` condition reduces to showMsg.
  updateSynthesisActionStatus(showMsg ? "Stopped." : "");
}
// Enables/disables and relabels the main control buttons from global state.
// NOTE(review): the play/pause icon markup was lost in a bad merge;
// reconstructed with Font Awesome icons — confirm against the page's markup.
function updateAllControlButtons() {
  // Synthesize: needs text + a selected voice, and no synthesis in flight.
  synthesizeBtn.disabled = !currentText.trim() || !selectedVoiceObject || isSynthesizing;

  if (isPlaying && !isPaused) {        // playing → offer pause
    playAudioBtn.innerHTML = '<i class="fas fa-pause"></i>';
    playAudioBtn.title = "Pause Audio";
    playAudioBtn.disabled = false;
  } else if (isPaused) {               // paused → offer resume
    playAudioBtn.innerHTML = '<i class="fas fa-play"></i>';
    playAudioBtn.title = "Resume Audio";
    playAudioBtn.disabled = false;
  } else {                             // idle → offer play, if audio exists
    playAudioBtn.innerHTML = '<i class="fas fa-play"></i>';
    playAudioBtn.title = "Play Audio";
    playAudioBtn.disabled = !audioBuffer || isSynthesizing;
  }

  // Stop: enabled whenever anything (playback or synthesis) is running.
  stopBtn.disabled = !isPlaying && !isPaused && !isSynthesizing;

  // Download: needs a finished blob and no synthesis in progress.
  downloadBtn.disabled = !currentAudioBlob || isSynthesizing;

  // Sample: only when idle and a voice is selected.
  playVoiceSample.disabled = !selectedVoiceObject || isSynthesizing || isPlaying || isPaused;
}
+
+
+ // --- Progress Tracking & UI Update (For Playback - Existing) ---
+ // let animationFrameId = null; // Already declared
+ function startProgressTracking() { /* ... same as before ... */ }
+ function updateProgressUI(currentPlayTime, duration) { /* ... same as before ... */ }
+ function updateReadingHighlight() { /* ... same as before ... */ }
+ function formatTime(totalSeconds) { /* ... same as before ... */ }
+
+ // --- File Input & Processing (setDocumentContent resets audio states) ---
+ function setupDragAndDrop() { /* ... same as before ... */ }
// Swallow a DOM event entirely (shared by the drag-and-drop handlers).
function preventDefaults(e) {
  e.preventDefault();
  e.stopPropagation();
}
+ function handlePaste() { /* ... same as before ... */ }
+ function handleFile(file) { /* ... same as before ... */ }
+ function readTextFile(file) { /* ... same as before ... */ }
+ async function readPDFFile(file) { /* ... same as before ... */ }
+ async function readWordDocument(file) { /* ... same as before ... */ }
+ function readHTMLFile(file) { /* ... same as before ... */ }
+ // Other basic readers: readRTFFile, readMarkdownFile, etc. (same)
+
// Loads new document text (plus optional notes/bookmarks) into the viewer,
// resetting any in-progress synthesis/playback and previously built audio.
function setDocumentContent(text, notes = '', bookmarks = []) {
  handleStopAll(false); // stop activity and clear transient state first
  audioBuffer = null;
  currentAudioBlob = null;

  currentText = text.trim();
  currentNotes = notes;
  notesTextarea.value = currentNotes;
  currentBookmarks = bookmarks;

  // Render one <p> per line so paragraphs can be highlighted and bookmarked.
  // NOTE(review): the paragraph markup was lost in a bad merge; reconstructed
  // to carry a per-line index for handleParagraphClickForBookmark — confirm
  // class names against the stylesheet.
  // NOTE(review): `line` is interpolated into innerHTML unescaped — escape it
  // if documents can contain markup (XSS risk with untrusted files).
  documentContentEl.innerHTML = currentText.split(/\r\n|\r|\n/)
    .map((line, index) => {
      const isBookmarked = currentBookmarks.includes(index);
      return `<p class="doc-paragraph${isBookmarked ? ' bookmarked' : ''}" data-index="${index}">${line.trim() || '&nbsp;'}</p>`;
    })
    .join('');
  charCount.textContent = `${currentText.length.toLocaleString()} characters`;

  if (currentText.length > 1000000) showToast('Doc >1M chars. Consider splitting.', 'info', 5000);

  updateCostEstimator();
  updateAllControlButtons();
  updateSynthesisActionStatus();
  updateProgressUI(0, 0); // reset playback progress display
}
+
+ function splitTextIntoChunks(text, chunkSize) { /* ... same as before ... */ }
+
+ // --- Notes & Bookmarking Logic (Existing) ---
+ function handleParagraphClickForBookmark(event) { /* ... same as before ... */ }
+
+ // --- Library Logic (Existing - loadFromLibrary needs to set voice and then user synthesizes) ---
+ function getLibrary() { /* ... same as before ... */ }
+ function saveLibrary(library) { /* ... same as before ... */ }
+ function handleSaveToLibrary() { /* ... same as before, saves selectedVoiceObject.name ... */ }
+ function renderLibraryItems() { /* ... same as before ... */ }
// Loads a saved library item: restores its text/notes/bookmarks, then its
// voice settings. Voices must be re-fetched before the saved voice can be
// applied, and audio itself is not stored, so the user must re-synthesize.
function loadFromLibrary(itemId) {
  const library = getLibrary();
  const item = library.find(entry => entry.id === itemId);
  if (!item) {
    showToast('Error: Item not found in library.', 'error');
    return;
  }

  // Also stops audio and clears any previously synthesized buffers.
  setDocumentContent(item.text, item.notes || '', item.bookmarks || []);

  if (item.voiceSettings) {
    languageSelect.value = item.voiceSettings.languageCode || 'en-US';
    modelSelect.value = item.voiceSettings.model || 'standard';
    // loadVoices() repopulates the dropdown (selecting a default); only after
    // it resolves can the saved voice name be re-selected.
    loadVoices().then(() => {
      const savedName = item.voiceSettings.voiceName;
      if (savedName) {
        const stillAvailable = Array.from(voiceSelectorDropdown.options).some(opt => opt.value === savedName);
        if (stillAvailable) {
          voiceSelectorDropdown.value = savedName;
        } else {
          // populateVoiceSelectorDropdown already picked a fallback default.
          showToast(`Voice "${item.voiceSettings.voiceName}" not found for current language/model. Select manually.`, 'info', 4000);
        }
      }
      handleVoiceSelectionChange(); // ensure selectedVoiceObject is updated

      rateSelect.value = item.voiceSettings.rate || 1;
      pitchSelect.value = item.voiceSettings.pitch || 0;
      updateCostEstimator();
      updateAllControlButtons();
      updateSynthesisActionStatus("Settings loaded. Click 'Synthesize Audio'.");
    });
  }
  showToast(`Loaded "${item.title}". Synthesize to hear audio.`, 'success');
  closeLibraryModal();
}
+ function deleteFromLibrary(itemId) { /* ... same as before ... */ }
+ function openLibraryModal() { /* ... same as before ... */ }
+ function closeLibraryModal() { /* ... same as before ... */ }
+
+
+ // --- Initialize the application ---
+ // NOTE(review): init() executes before the `const defaultStandardMaleVoices`
+ // declaration that follows it; if init() (or anything it calls synchronously)
+ // reads that const, it will throw a TDZ ReferenceError — confirm call order.
+ init();
+ // Helper functions from previous versions (ensure they are defined or simplified)
// Preferred fallback voices (male, Standard tier) used when no voice is
// explicitly selected; ordered by priority: US, then GB, then AU.
const defaultStandardMaleVoices = [
    'en-US-Standard-D',
    'en-US-Standard-B',
    'en-US-Standard-A',
    'en-US-Standard-I',
    'en-GB-Standard-B',
    'en-GB-Standard-D',
    'en-GB-Standard-A',
    'en-AU-Standard-B',
    'en-AU-Standard-D',
    'en-AU-Standard-A',
];
// Apply the persisted (or OS-preferred) theme on startup and wire the toggle button.
function initDarkMode() {
    const savedTheme = localStorage.getItem('theme');
    const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
    // Explicit saved choice wins; otherwise fall back to the system preference.
    const useDark = savedTheme === 'dark' || (!savedTheme && prefersDark);

    const icon = darkModeToggle.querySelector('i');
    document.documentElement.setAttribute('data-theme', useDark ? 'dark' : 'light');
    icon.className = useDark ? 'fas fa-sun text-xl' : 'fas fa-moon text-xl';

    darkModeToggle.addEventListener('click', toggleDarkMode);
}
+
// Flip the current theme, persist the choice, and swap the toggle icon.
function toggleDarkMode() {
    const root = document.documentElement;
    const nextTheme = root.getAttribute('data-theme') === 'dark' ? 'light' : 'dark';

    root.setAttribute('data-theme', nextTheme);
    localStorage.setItem('theme', nextTheme);
    darkModeToggle.querySelector('i').className =
        nextTheme === 'dark' ? 'fas fa-sun text-xl' : 'fas fa-moon text-xl';
}
// Let the user click anywhere on the progress bar to seek within the audio.
function initProgressIndicator() {
    progressContainer.addEventListener('click', (event) => {
        if (!audioBuffer || !isPlaying) return;
        const { left, width } = progressContainer.getBoundingClientRect();
        const fraction = Math.max(0, Math.min(1, (event.clientX - left) / width));
        seekToTime(fraction * audioBuffer.duration);
    });
}
// Jump playback to `time` (seconds) within the decoded buffer by replacing
// the current BufferSourceNode. No-op while paused: resume handles that path.
function seekToTime(time) {
    if (!audioContext || !audioSource || !audioBuffer || !isPlaying || isPaused) return;

    // Detach the handler first so stopping here doesn't look like a natural end.
    audioSource.onended = null;
    try { audioSource.stop(); } catch (e) { /* already stopped */ }

    // BufferSourceNodes are single-use: build a fresh one for the new offset.
    audioSource = audioContext.createBufferSource();
    audioSource.buffer = audioBuffer;
    audioSource.connect(audioContext.destination);

    pausedAtTime = time;
    // Re-anchor so (currentTime - playbackStartTime) + pausedAtTime tracks position.
    playbackStartTime = audioContext.currentTime - pausedAtTime;

    audioSource.start(0, pausedAtTime);
    audioSource.onended = () => {
        // Only a natural end (still marked playing, not paused) stops everything.
        if (isPlaying && !isPaused) handleStopAll(false);
    };

    // Reflect the new position immediately rather than waiting for the RAF loop.
    updateProgressUI(pausedAtTime, audioBuffer.duration);
}
// Wire the download button to export the most recent synthesis.
function initDownloadFeature() {
    downloadBtn.addEventListener('click', () => downloadCurrentAudio());
}
// Offer the last synthesized audio blob as a file download via a temporary
// anchor element. Filename: tts-<voice>-<timestamp>.mp3.
async function downloadCurrentAudio() {
    if (!currentAudioBlob) {
        showToast('No audio to download.', 'info');
        return;
    }
    try {
        const objectUrl = URL.createObjectURL(currentAudioBlob);
        const anchor = document.createElement('a');
        anchor.style.display = 'none';
        anchor.href = objectUrl;

        const stamp = new Date().toISOString().slice(0, 19).replace(/[T:]/g, '-');
        const voicePart = selectedVoiceObject
            ? selectedVoiceObject.name.replace(/[^a-zA-Z0-9_]/g, '-')
            : 'unknown';
        anchor.download = `tts-${voicePart}-${stamp}.mp3`;

        document.body.appendChild(anchor);
        anchor.click();
        document.body.removeChild(anchor);
        URL.revokeObjectURL(objectUrl);
        showToast('Download started.', 'success');
    } catch (err) {
        console.error('Download error:', err);
        showToast('Download failed.', 'error');
    }
}
// Show a transient toast notification, replacing any toast already on screen.
// `duration` is total on-screen time in ms; the fade-out occupies the last 300ms.
function showToast(message, type = 'info', duration = 3000) {
    document.querySelectorAll('.toast').forEach(t => t.remove());
    const toast = document.createElement('div');
    toast.className = `toast ${type}`;
    toast.textContent = message;
    document.body.appendChild(toast);
    // BUG FIX: durations under 300ms previously produced a negative timeout,
    // which fires immediately and fades the toast before it is readable.
    setTimeout(() => {
        toast.style.animation = 'fadeOutToast 0.3s ease forwards';
        setTimeout(() => toast.remove(), 300);
    }, Math.max(0, duration - 300));
}
// Human-readable file size, e.g. 1536 -> "1.5 KB".
// BUG FIX: clamp the unit index. The original yielded "... undefined" for
// sizes above GB (index past the array) and for 0 < bytes < 1 (negative
// log -> index -1). Also adds a TB label.
function formatFileSize(bytes) {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    const i = Math.min(sizes.length - 1,
        Math.max(0, Math.floor(Math.log(bytes) / Math.log(k))));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
// Classify a Google TTS voice by substring of its name and attach the
// per-million-character rate. Check order matters: studio > wavenet > neural2,
// with 'standard' as the catch-all.
function getVoiceTypeAndPricing(voiceName) {
    const lowered = voiceName.toLowerCase();
    const tiers = [
        ['studio', 'Studio', PRICING.studio],
        ['wavenet', 'WaveNet', PRICING.wavenet],
        ['neural2', 'Neural2', PRICING.neural2],
    ];
    for (const [key, label, rate] of tiers) {
        if (lowered.includes(key)) return { type: key, name: label, rate };
    }
    return { type: 'standard', name: 'Standard', rate: PRICING.standard };
}
// Estimate synthesis cost: `voiceTypeInfo.rate` is USD per 1M characters.
// Returns { cost, details }; a zero/missing length or missing tier info
// yields a zeroed result.
function calculateEstimatedCost(textLength, voiceTypeInfo) {
    if (!textLength || !voiceTypeInfo) {
        return { cost: 0, details: '' };
    }
    const cost = voiceTypeInfo.rate * (textLength / 1000000);
    const details = `${textLength.toLocaleString()} characters`;
    return { cost, details };
}
+ function showCostBreakdownModal() { /* ... ensure this is defined or remove call if not needed ... */ }
// Validate the entered Google TTS API key against the live voices endpoint,
// persist it to localStorage on success, and refresh the voice list.
// NOTE(review): storing an API key in localStorage is inherently exposed to
// any script on the page — acceptable for a personal tool, flag otherwise.
// BUG FIX: both innerHTML assignments below contained raw newlines inside
// single-quoted strings (a SyntaxError — the markup was mangled in this copy);
// rebuilt as valid one-line strings with reconstructed icon classes — confirm
// against the original UI.
async function saveApiKey() {
    const key = apiKeyInput.value ? apiKeyInput.value.trim() : '';
    if (!key) { showToast('Enter API key.', 'error'); return; }
    saveKeyBtn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Validating...';
    saveKeyBtn.disabled = true;
    try {
        if (key.length < 20 || !key.match(/^[A-Za-z0-9_-]+$/)) throw new Error('Invalid API key format.');
        const testUrl = `https://texttospeech.googleapis.com/v1/voices?languageCode=en-US`;
        const res = await fetch(testUrl, { headers: { 'X-Goog-Api-Key': key, 'Accept': 'application/json' }, mode: 'cors', cache: 'no-cache' });
        if (res.ok) {
            apiKey = key; localStorage.setItem('googleTTSApiKey', key);
            showToast('API key saved!', 'success'); loadVoices();
        } else {
            const errData = await res.json().catch(() => null);
            throw new Error(`Validation failed: ${errData?.error?.message || `HTTP ${res.status}`}`);
        }
    } catch (err) {
        console.error('API key error:', err); showToast(err.message, 'error', 6000);
        apiKey = ''; localStorage.removeItem('googleTTSApiKey');
    } finally {
        saveKeyBtn.innerHTML = '<i class="fas fa-save mr-2"></i>Save Key';
        saveKeyBtn.disabled = false; updateSynthesisActionStatus(); updateAllControlButtons();
    }
}
// Drive the progress UI with requestAnimationFrame while audio is playing.
// The loop self-terminates when playback stops, pauses, or reaches the end.
function startProgressTracking() {
    if (animationFrameId) cancelAnimationFrame(animationFrameId);
    const updateLoop = () => {
        if (!isPlaying || isPaused || !audioBuffer || !audioContext) {
            animationFrameId = null;
            return;
        }
        const elapsed = audioContext.currentTime - playbackStartTime;
        const position = pausedAtTime + elapsed;
        const duration = audioBuffer.duration;
        if (position >= duration) {
            // Playback is ending; pin the bar at 100% and let onended stop things.
            updateProgressUI(duration, duration);
            animationFrameId = null;
            return;
        }
        updateProgressUI(position, duration);
        animationFrameId = requestAnimationFrame(updateLoop);
    };
    animationFrameId = requestAnimationFrame(updateLoop);
}
-
// Render playback progress: bar width, position marker, time labels, and the
// text "reading" progress derived from audio position.
function updateProgressUI(currentPlayTime, duration) {
    if (isNaN(currentPlayTime) || isNaN(duration) || duration === 0) {
        // Nothing meaningful to show: zero everything out.
        progressBar.style.width = '0%';
        currentPositionMarker.style.left = '0%';
        readingProgress.style.width = '0%';
        currentTimeEl.textContent = formatTime(0);
        totalTimeEl.textContent = formatTime(0);
        return;
    }

    const percent = (currentPlayTime / duration) * 100;
    progressBar.style.width = `${percent}%`;
    currentPositionMarker.style.left = `${percent}%`;
    currentTimeEl.textContent = formatTime(Math.floor(currentPlayTime));
    totalTimeEl.textContent = formatTime(Math.floor(duration));

    if (currentText.length > 0) {
        // Approximate the character position from audio progress; cap at 100%.
        currentReadingPosition = Math.floor((percent / 100) * currentText.length);
        const readPercent = Math.min(100, (currentReadingPosition / currentText.length) * 100);
        readingProgress.style.width = `${readPercent}%`;
        updateReadingHighlight();
    } else {
        readingProgress.style.width = '0%';
    }
}
-
-
// Highlight the paragraph that contains the current reading position.
// Requires decoded audio — the position estimate is derived from audio progress.
function updateReadingHighlight() {
    documentContentEl.querySelectorAll('.reading-highlight')
        .forEach(el => el.classList.remove('reading-highlight'));
    if (!currentText || !audioBuffer || audioBuffer.duration === 0) return;

    // When fully stopped (not merely paused) with audio loaded, treat the whole
    // document as read; while paused, keep the last known position as-is.
    let position = currentReadingPosition;
    if (!isPlaying && !isPaused && audioBuffer && audioBuffer.duration > 0) {
        position = currentText.length;
    }

    let offset = 0;
    for (const paragraph of Array.from(documentContentEl.children)) {
        const length = (paragraph.textContent || "").length + 1; // +1 for the paragraph break
        if (offset <= position && position < offset + length) {
            paragraph.classList.add('reading-highlight');
            break;
        }
        offset += length;
    }
}
-
// Format whole seconds as m:ss (e.g. 65 -> "1:05").
function formatTime(totalSeconds) {
    const minutes = Math.floor(totalSeconds / 60);
    const seconds = totalSeconds % 60;
    return `${minutes}:${String(seconds).padStart(2, '0')}`;
}
-
- // --- File Input & Processing ---
// Wire up drag-and-drop on the dropzone, suppressing the browser's default
// "open the dropped file" behavior for drops anywhere on the page.
function setupDragAndDrop() {
    const dragEvents = ['dragenter', 'dragover', 'dragleave', 'drop'];
    for (const name of dragEvents) {
        dropzone.addEventListener(name, preventDefaults, false);
        document.body.addEventListener(name, preventDefaults, false);
    }
    const highlight = () => dropzone.classList.add('active');
    const unhighlight = () => dropzone.classList.remove('active');
    for (const name of ['dragenter', 'dragover']) dropzone.addEventListener(name, highlight, false);
    for (const name of ['dragleave', 'drop']) dropzone.addEventListener(name, unhighlight, false);
    dropzone.addEventListener('drop', (e) => {
        if (e.dataTransfer.files.length > 0) handleFile(e.dataTransfer.files[0]);
    });
}
// Swallow default drag/drop handling so the browser never opens dropped files.
// BUG FIX: this helper was deleted in the diff while setupDragAndDrop still
// references it — restored here.
function preventDefaults(e) { e.preventDefault(); e.stopPropagation(); }

// Temporarily make the content area editable so the user can paste text,
// then capture the pasted text and lock the area again.
function handlePaste() {
    documentContentEl.innerHTML = '';
    const placeholder = document.createElement('p');
    placeholder.className = 'text-gray-500 dark:text-gray-400 italic';
    placeholder.textContent = 'Pasting...';
    documentContentEl.appendChild(placeholder);
    documentContentEl.contentEditable = "true";
    documentContentEl.focus();
    // Give the paste gesture a moment to land before reading the content.
    setTimeout(() => {
        const text = documentContentEl.innerText;
        documentContentEl.contentEditable = "false";
        if (text && text !== placeholder.textContent) {
            setDocumentContent(text.trim());
            showToast('Pasted.', 'success');
        } else {
            // BUG FIX: the original assignment was a string literal broken
            // across lines (SyntaxError — markup mangled in this copy);
            // rebuilt — confirm the classes against the real UI.
            documentContentEl.innerHTML = '<p class="text-gray-500 dark:text-gray-400 italic">Paste or upload.</p>';
        }
    }, 100);
}
-
-
// Route an uploaded file to the appropriate reader based on its extension.
// Enforces a 50MB size cap and shows progress/error states in the content area.
function handleFile(file) {
    if (file.size > 50 * 1024 * 1024) { showToast('File too large (max 50MB).', 'error'); return; }
    const ext = file.name.toLowerCase().split('.').pop();

    // BUG FIX: the original "Processing ..." assignment was a template literal
    // broken across lines (SyntaxError — markup mangled in this copy). Rebuilt
    // via textContent so the filename is never interpreted as HTML.
    documentContentEl.innerHTML = '';
    const processingMsg = document.createElement('p');
    processingMsg.className = 'italic';
    processingMsg.textContent = `Processing ${file.name}...`;
    documentContentEl.appendChild(processingMsg);

    try {
        switch (ext) {
            case 'txt': readTextFile(file); break;
            case 'pdf': if (typeof pdfjsLib !== 'undefined') readPDFFile(file); else throw new Error('PDF lib not loaded.'); break;
            case 'doc': case 'docx': if (typeof mammoth !== 'undefined') readWordDocument(file); else throw new Error('Word lib not loaded.'); break;
            case 'html': case 'htm': readHTMLFile(file); break;
            default: if (file.type && file.type.startsWith('text/')) readTextFile(file); else throw new Error(`Unsupported: .${ext}`);
        }
    } catch (err) {
        console.error('File process error:', err);
        showToast('File error: ' + err.message, 'error', 5000);
        // BUG FIX: the original passed HTML markup into setDocumentContent(),
        // which treats its argument as document text (polluting currentText,
        // the character count, and the cost estimate). Clear the document
        // state, then render the error message directly.
        setDocumentContent('');
        documentContentEl.innerHTML = '<p class="text-red-500">Error loading document.</p>';
    }
}
-
// Load a plain-text file (UTF-8) as the active document.
function readTextFile(file) {
    const reader = new FileReader();
    reader.onload = (e) => setDocumentContent(e.target.result);
    reader.onerror = () => {
        showToast('Error reading file', 'error');
        // BUG FIX: the original error markup was a string literal broken across
        // lines (SyntaxError), and was fed through setDocumentContent() as if
        // it were document text. Clear state, then render the error directly.
        setDocumentContent('');
        documentContentEl.innerHTML = '<p class="text-red-500">Error reading file.</p>';
    };
    reader.readAsText(file, 'UTF-8');
}
-
// Extract text from every page of a PDF (via pdf.js) into the document area,
// updating a simple per-page status line while extracting.
async function readPDFFile(file) {
    const fileURL = URL.createObjectURL(file);
    try {
        const loadingTask = pdfjsLib.getDocument({ url: fileURL, verbosity: 0 });
        const pdf = await loadingTask.promise;
        let fullText = '';
        const numPages = pdf.numPages;
        for (let i = 1; i <= numPages; i++) {
            const page = await pdf.getPage(i);
            const textContent = await page.getTextContent();
            fullText += textContent.items.map(item => item.str).join(' ') + '\n\n';
            // BUG FIX: the original progress markup was a template literal broken
            // across lines (SyntaxError — mangled in this copy); rebuilt as a
            // simple valid status line — confirm styling against the real UI.
            documentContentEl.innerHTML = `<p class="italic">Extracting PDF ${i}/${numPages}...</p>`;
        }
        setDocumentContent(fullText.trim());
        showToast(`Loaded PDF: ${numPages} pages.`, 'success');
    } catch (err) {
        console.error('PDF error:', err);
        showToast('PDF extract failed: ' + err.message, 'error');
        // Clear document state before rendering the error (see handleFile).
        setDocumentContent('');
        documentContentEl.innerHTML = '<p class="text-red-500">Error reading PDF.</p>';
    } finally {
        URL.revokeObjectURL(fileURL); // Always release the object URL.
    }
}
-
// Extract raw text from a .doc/.docx file via mammoth.js and load it.
async function readWordDocument(file) {
    const reader = new FileReader();
    reader.onload = async (e) => {
        try {
            const result = await mammoth.extractRawText({ arrayBuffer: e.target.result });
            if (result.value && result.value.trim()) {
                setDocumentContent(result.value);
                showToast(`Loaded Word: ${file.name}`, 'success');
            } else {
                showToast('No text in Word doc.', 'info');
                // BUG FIX: broken multi-line string literal rebuilt; status is
                // rendered directly instead of through setDocumentContent(html).
                setDocumentContent('');
                documentContentEl.innerHTML = '<p class="text-gray-500 italic">No text in doc.</p>';
            }
            if (result.messages?.length) console.warn('Mammoth:', result.messages);
        } catch (err) {
            console.error('Word doc error:', err);
            showToast('Word doc error: ' + err.message, 'error');
            setDocumentContent('');
            documentContentEl.innerHTML = '<p class="text-red-500">Error loading Word doc.</p>';
        }
    };
    reader.onerror = () => {
        showToast('File read error for Word.', 'error');
        setDocumentContent('');
        documentContentEl.innerHTML = '<p class="text-red-500">Error reading file.</p>';
    };
    reader.readAsArrayBuffer(file);
}
-
- // Basic parsers for other formats (could be improved with dedicated libraries if needed)
- function readRTFFile(file) { /* Basic implementation */ readTextFile(file); showToast('RTF treated as text. Formatting may be lost.', 'info'); }
- function readMarkdownFile(file) { /* Basic implementation */ readTextFile(file); showToast('Markdown treated as text. Formatting may be lost.', 'info'); }
- function readJSONFile(file) { /* Basic implementation */ readTextFile(file); showToast('JSON treated as text.', 'info'); }
- function readHTMLFile(file) {
// Strip scripts/styles from an uploaded HTML file and load its visible text.
// NOTE(review): assigning untrusted HTML to a detached div's innerHTML can
// still trigger subresource loads (e.g. <img src>); consider DOMParser.
function readHTMLFile(file) {
    const reader = new FileReader();
    reader.onload = (e) => {
        const tempDiv = document.createElement('div');
        tempDiv.innerHTML = e.target.result;
        tempDiv.querySelectorAll('script, style, link[rel="stylesheet"]').forEach(el => el.remove());
        const text = (tempDiv.textContent || tempDiv.innerText || "")
            .replace(/\s\s+/g, ' ')
            .replace(/\n\s*\n/g, '\n\n')
            .trim();
        setDocumentContent(text);
        showToast(`Loaded HTML: ${file.name}`, 'success');
    };
    reader.onerror = () => {
        showToast('Error reading HTML', 'error');
        // BUG FIX: broken multi-line string literal rebuilt; error rendered
        // directly instead of through setDocumentContent(html).
        setDocumentContent('');
        documentContentEl.innerHTML = '<p class="text-red-500">Error reading file.</p>';
    };
    reader.readAsText(file);
}
- function readXMLFile(file) { /* Basic implementation */ readTextFile(file); showToast('XML treated as text.', 'info'); }
- function readCSVFile(file) { /* Basic implementation */ readTextFile(file); showToast('CSV treated as text.', 'info'); }
-
-
- function setDocumentContent(text) {
- currentText = text.trim(); // Store trimmed text
- // Split by newlines and create
tags. Ensure non-empty paragraphs for spacing.
- documentContent.innerHTML = currentText.split(/\r\n|\r|\n/)
- .map(line => `
${line.trim() || ' '}
`) // Use for empty lines to maintain height
- .join('');
- charCount.textContent = `${currentText.length.toLocaleString()} characters`;
-
- if (currentText.length > 1000000) { // Google's general recommendation for single requests
- showToast('Warning: Document is very long (>1M chars). Consider splitting for best performance.', 'info', 5000);
- }
- updateCostEstimator();
- updateSynthesisStatus();
- stopPlayback(); // Reset playback state for new content
- }
-
- // --- Playback Controls & State Management ---
- async function handlePlayPause() {
- if (!initAudioContext()) { showToast("Audio system not ready.", "error"); return; }
-
- if (isPlaying && !isPaused) { // Currently playing, so pause
- pausePlayback();
- } else if (isPaused) { // Currently paused, so resume
- resumePlayback();
- } else { // Not playing, so start
- await startFullPlayback();
- }
- }
-
-
- async function startFullPlayback() {
- if (!selectedVoice) { showToast('Please select a voice first.', 'info'); return; }
- if (!currentText.trim()) { showToast('No document content to play.', 'info'); return; }
- if (isPlaying) stopPlayback(); // Stop if somehow already playing
-
- playBtn.innerHTML = '
'; // Show loading in play button
- playBtn.disabled = true;
-
- try {
- showToast('Synthesizing speech...', 'info', 1500);
- const chunks = splitTextIntoChunks(currentText, MAX_CHUNK_SIZE);
- if (chunks.length === 0) {
- showToast('Nothing to synthesize.', 'info');
- return;
- }
-
- showSynthesisProgress(0); // Show 0% initially
- const allAudioBase64Data = [];
- for (let i = 0; i < chunks.length; i++) {
- if (isPaused || !isPlaying) { // Check if stop was called during synthesis
- console.log("Synthesis interrupted by stop/pause.");
- hideSynthesisProgress();
- updatePlaybackButtons(); // Reset play button
- return; // Exit synthesis loop
- }
- const chunkText = chunks[i];
- const audioData = await synthesizeSpeech(
- chunkText, selectedVoice,
- parseFloat(rateSelect.value), parseFloat(pitchSelect.value),
- modelSelect.value
- );
- allAudioBase64Data.push(audioData);
- showSynthesisProgress(((i + 1) / chunks.length) * 100); // Update progress
- }
- hideSynthesisProgress();
-
- if (allAudioBase64Data.length > 0) {
- if (allAudioBase64Data.length === 1) {
- await playSingleAudioChunk(allAudioBase64Data[0]);
- } else {
- await playCombinedAudioChunks(allAudioBase64Data);
- }
- } else if (isPlaying) { // If synthesis produced nothing but was "playing"
- stopPlayback(); // Clean up state
- showToast('Synthesis completed but no audio data was generated.', 'info');
- }
-
- } catch (error) {
- hideSynthesisProgress();
- console.error('Error during full playback:', error);
- showToast('Playback error: ' + error.message, 'error', 5000);
- stopPlayback(); // Ensure UI and state are reset
- } finally {
- // updatePlaybackButtons will be called by playSingle/Combined or stopPlayback
- // but ensure playBtn is re-enabled if it was disabled and no playback started
- if (playBtn.disabled && !isPlaying) {
- updatePlaybackButtons();
// Split `text` into chunks of at most `chunkSize` characters, preferring
// sentence boundaries (., !, ?); a single sentence longer than chunkSize is
// hard-split at exact offsets. Returns a list of non-empty chunks.
function splitTextIntoChunks(text, chunkSize) {
    const sentences = text.match(/[^.!?]+[.!?]+(\s|$)/g) || [text];
    const chunks = [];
    let currentChunk = "";
    for (const sentence of sentences) {
        if (currentChunk.length + sentence.length <= chunkSize) {
            currentChunk += sentence;
        } else {
            if (currentChunk.trim()) chunks.push(currentChunk.trim());
            if (sentence.length > chunkSize) {
                for (let i = 0; i < sentence.length; i += chunkSize) {
                    chunks.push(sentence.substring(i, i + chunkSize));
                }
                // BUG FIX: the original left currentChunk holding the text that
                // was just pushed, so that text was duplicated into later chunks.
                currentChunk = "";
            } else {
                currentChunk = sentence;
            }
        }
    }
    if (currentChunk.trim()) chunks.push(currentChunk.trim());
    return chunks.filter(c => c.length > 0);
}
-
-
- function pausePlayback() {
- if (!audioContext || !audioSource || !isPlaying || isPaused) return;
- audioContext.suspend().then(() => { // Gracefully suspend context
- pausedAtTime += audioContext.currentTime - playbackStartTime; // Accumulate played duration before pause
- isPaused = true;
- if (animationFrameId) cancelAnimationFrame(animationFrameId);
- updatePlaybackButtons();
- showToast('Playback paused.', 'info', 1500);
- });
- }
-
- function resumePlayback() {
- if (!audioContext || !audioSource || !isPlaying || !isPaused) return;
- audioContext.resume().then(() => {
- playbackStartTime = audioContext.currentTime; // Reset start time for elapsed calculation
- isPaused = false;
- startProgressTracking(); // Restart UI updates
- updatePlaybackButtons();
- showToast('Playback resumed.', 'info', 1500);
- });
- }
-
- function stopPlayback() {
- if (audioSource) {
- audioSource.onended = null; // Remove onended to prevent recursion if stop is called from onended
- try { audioSource.stop(); } catch (e) { /* ignore if already stopped */ }
- audioSource = null;
- }
- if (animationFrameId) {
- cancelAnimationFrame(animationFrameId);
- animationFrameId = null;
// Toggle a bookmark on the clicked paragraph: update currentBookmarks (kept
// sorted), the paragraph's CSS state, and its bookmark icon.
// BUG FIX: the function's closing brace was lost in the diff mangle (all
// following lines are deletions) — restored here.
function handleParagraphClickForBookmark(event) {
    const pElement = event.target.closest('p[data-paragraph-index]');
    if (!pElement) return;
    const paragraphIndex = parseInt(pElement.dataset.paragraphIndex, 10);
    if (isNaN(paragraphIndex)) return;
    const iconEl = pElement.querySelector('.bookmark-icon i');
    if (currentBookmarks.includes(paragraphIndex)) {
        currentBookmarks = currentBookmarks.filter(idx => idx !== paragraphIndex);
        pElement.classList.remove('bookmarked');
        // NOTE(review): 'fa-bookmark-o' is a Font Awesome 4 class; with FA5+
        // the outline icon is 'far fa-bookmark' — confirm the FA version used.
        if (iconEl) iconEl.className = 'fas fa-bookmark-o';
        showToast('Bookmark removed.', 'info', 1500);
    } else {
        currentBookmarks.push(paragraphIndex);
        currentBookmarks.sort((a, b) => a - b);
        pElement.classList.add('bookmarked');
        if (iconEl) iconEl.className = 'fas fa-bookmark';
        showToast('Bookmark added.', 'success', 1500);
    }
}
-
- isPlaying = false;
- isPaused = false;
- playbackStartTime = 0;
- pausedAtTime = 0;
- // currentAudioBlob = null; // Keep blob for download unless new content is loaded
- audioBuffer = null; // Clear decoded buffer
-
- updatePlaybackButtons();
- updateProgressUI(0, 0); // Reset progress bar and times
- documentContent.querySelectorAll('.reading-highlight').forEach(el => el.classList.remove('reading-highlight'));
- currentReadingPosition = 0;
- readingProgress.style.width = '0%';
-
- // showToast('Playback stopped.', 'info', 1500); // Can be a bit noisy
}
-
- function updatePlaybackButtons() {
- if (isPlaying && !isPaused) { // Playing
- playBtn.innerHTML = '
';
- playBtn.title = "Pause";
- playBtn.disabled = false;
- pauseBtn.disabled = false; // This button is now part of playBtn logic
- stopBtn.disabled = false;
- } else if (isPaused) { // Paused
- playBtn.innerHTML = '
';
- playBtn.title = "Resume";
- playBtn.disabled = false;
- pauseBtn.disabled = true;
- stopBtn.disabled = false;
- } else { // Stopped or initial state
- playBtn.innerHTML = '
';
- playBtn.title = "Play";
- playBtn.disabled = !currentText.trim() || !selectedVoice; // Disabled if no text/voice
- pauseBtn.disabled = true;
- stopBtn.disabled = true;
- }
- downloadBtn.disabled = !currentAudioBlob;
// Read the saved-documents library from localStorage; on corrupt JSON the
// stored value is discarded and an empty library is returned.
function getLibrary() {
    try {
        const raw = localStorage.getItem(LIBRARY_STORAGE_KEY);
        return raw ? JSON.parse(raw) : [];
    } catch (e) {
        console.error("Lib parse error:", e);
        localStorage.removeItem(LIBRARY_STORAGE_KEY);
        return [];
    }
}
// Persist the library array back to localStorage.
function saveLibrary(library) {
    localStorage.setItem(LIBRARY_STORAGE_KEY, JSON.stringify(library));
}
// Snapshot the current document (text, notes, bookmarks) plus the active
// voice settings into the localStorage library, newest first.
function handleSaveToLibrary() {
    if (!currentText.trim()) { showToast('Nothing to save.', 'info'); return; }
    const library = getLibrary();
    // Title = first 50 chars of the document, whitespace-collapsed.
    const title = currentText.substring(0, 50).replace(/\s+/g, ' ').trim() + (currentText.length > 50 ? '...' : '');
    const newEntry = {
        // Idiom fixes: Date.now() over new Date().getTime(); String#slice over
        // the deprecated String#substr (same 5-character suffix).
        id: `doc-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
        title: title,
        text: currentText,
        notes: notesTextarea.value,
        bookmarks: [...currentBookmarks],
        voiceSettings: {
            voiceName: selectedVoiceObject ? selectedVoiceObject.name : null,
            languageCode: languageSelect.value, // persisted so loadFromLibrary can restore it
            rate: parseFloat(rateSelect.value),
            pitch: parseFloat(pitchSelect.value),
            model: modelSelect.value
        },
        savedAt: new Date().toISOString()
    };
    library.unshift(newEntry);
    saveLibrary(library);
    showToast(`"${title}" saved!`, 'success');
    renderLibraryItems();
}
-
-
/**
 * Split `text` into chunks no longer than `chunkSize`, preferring sentence
 * boundaries (., !, ?) and hard-splitting only sentences that are themselves
 * longer than `chunkSize`.
 *
 * @param {string} text - Text to split.
 * @param {number} chunkSize - Maximum chunk length in characters.
 * @returns {string[]} Non-empty chunks in original order.
 */
function splitTextIntoChunks(text, chunkSize) {
    // Sentence-ish segments; fall back to the whole text when nothing matches.
    const sentences = text.match(/[^.!?]+[.!?]+(\s|$)/g) || [text];
    const chunks = [];
    let currentChunk = "";

    for (const sentence of sentences) {
        if (currentChunk.length + sentence.length <= chunkSize) {
            currentChunk += sentence;
        } else {
            // Flush whatever was accumulated before handling this sentence.
            if (currentChunk.trim()) chunks.push(currentChunk.trim());
            if (sentence.length > chunkSize) {
                // Oversized sentence: hard-split into fixed-size pieces.
                for (let i = 0; i < sentence.length; i += chunkSize) {
                    chunks.push(sentence.substring(i, i + chunkSize));
                }
                // BUG FIX: currentChunk was previously not cleared here, so the
                // already-flushed text got prepended again to the next chunk,
                // duplicating content in the output.
                currentChunk = "";
            } else {
                currentChunk = sentence; // Start a new chunk with this sentence.
            }
        }
    }
    if (currentChunk.trim()) chunks.push(currentChunk.trim()); // Push the last chunk
    return chunks.filter(chunk => chunk.length > 0); // Ensure no empty chunks
}
+ // Rebuild the library modal's list (libraryItemsUl) from the saved entries.
+ // NOTE(review): several string literals below are split across lines with their
+ // HTML markup missing — this looks like extraction garbling of the original
+ // innerHTML/template strings; recover the markup from version control before editing.
+ function renderLibraryItems() {
+ const library = getLibrary(); libraryItemsUl.innerHTML = '';
+ // Empty-library placeholder (markup lost in extraction).
+ if (library.length === 0) { libraryItemsUl.innerHTML = '
Library is empty.
'; return; }
+ library.forEach(item => {
+ const li = document.createElement('li');
+ li.className = 'flex justify-between items-center py-3 px-2 hover:bg-gray-100 dark:hover:bg-gray-700 rounded';
+ const titleDiv = document.createElement('div'); titleDiv.className = 'flex-grow cursor-pointer';
+ // Title plus saved-at timestamp (template markup lost in extraction).
+ titleDiv.innerHTML = `
${item.title}${new Date(item.savedAt).toLocaleString()}`;
+ // Assigned as a property (.onclick), not an HTML attribute.
+ titleDiv.onclick = () => loadFromLibrary(item.id);
+ const delBtn = document.createElement('button');
+ delBtn.innerHTML = '
';
+ delBtn.className = 'p-2 rounded hover:bg-red-100 dark:hover:bg-red-800'; delBtn.title = "Delete";
+ // stopPropagation keeps the delete click from bubbling to ancestor elements;
+ // deletion is gated behind a confirm() dialog.
+ delBtn.onclick = (e) => { e.stopPropagation(); if (confirm(`Delete "${item.title}"?`)) deleteFromLibrary(item.id); };
+ li.appendChild(titleDiv); li.appendChild(delBtn); libraryItemsUl.appendChild(li);
+ });
+ }
+ function deleteFromLibrary(itemId) { let lib = getLibrary(); lib = lib.filter(i => i.id !== itemId); saveLibrary(lib); renderLibraryItems(); showToast('Item deleted.', 'success'); }
+ function openLibraryModal() { renderLibraryItems(); libraryModal.style.display = 'block'; const firstItem = libraryItemsUl.querySelector('li div[onclick]'); if (firstItem) firstItem.focus(); else closeLibraryModalBtn.focus(); }
+ function closeLibraryModal() { libraryModal.style.display = 'none'; }
- // --- Initialize the application ---
- init();
});