ayymen committed on
Commit 86a1270 · 1 Parent(s): d58228b

Initial commit

Files changed (2)
  1. best_model.pth +3 -0
  2. config.json +261 -0
best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2443bf952ed3717da8b8e9304cb2355fa6257ef38cc23d7c36f83ee19ea0632
+ size 997700278
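
Note: the entry above is only a Git LFS pointer; the actual ~998 MB checkpoint is stored out of band and identified by its sha256 oid. A minimal sketch of fetching both files with huggingface_hub is shown below (the repo_id is a placeholder, not taken from this commit):

from huggingface_hub import hf_hub_download

# repo_id is a placeholder; substitute the actual Hub repository this commit belongs to.
model_path = hf_hub_download(repo_id="<user>/<repo>", filename="best_model.pth")
config_path = hf_hub_download(repo_id="<user>/<repo>", filename="config.json")
print(model_path, config_path)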
config.json ADDED
@@ -0,0 +1,261 @@
+ {
+ "output_path": "/home/aymen/Tamazight-NLP/Speech/coqui_tts",
+ "logger_uri": null,
+ "run_name": "tashelhit_bible",
+ "project_name": null,
+ "run_description": "\ud83d\udc38Coqui trainer run.",
+ "print_step": 25,
+ "plot_step": 100,
+ "model_param_stats": false,
+ "wandb_entity": null,
+ "dashboard_logger": "tensorboard",
+ "save_on_interrupt": true,
+ "log_model_step": null,
+ "save_step": 10000,
+ "save_n_checkpoints": 5,
+ "save_checkpoints": true,
+ "save_all_best": true,
+ "save_best_after": 1000,
+ "target_loss": null,
+ "print_eval": false,
+ "test_delay_epochs": 0,
+ "run_eval": true,
+ "run_eval_steps": null,
+ "distributed_backend": "nccl",
+ "distributed_url": "tcp://localhost:54321",
+ "mixed_precision": true,
+ "precision": "fp16",
+ "epochs": 1000,
+ "batch_size": 16,
+ "eval_batch_size": 4,
+ "grad_clip": [
+ 1000.0,
+ 1000.0
+ ],
+ "scheduler_after_epoch": true,
+ "lr": 0.001,
+ "optimizer": "AdamW",
+ "optimizer_params": {
+ "betas": [
+ 0.8,
+ 0.99
+ ],
+ "eps": 1e-09,
+ "weight_decay": 0.01
+ },
+ "lr_scheduler": null,
+ "lr_scheduler_params": {},
+ "use_grad_scaler": false,
+ "allow_tf32": false,
+ "cudnn_enable": true,
+ "cudnn_deterministic": false,
+ "cudnn_benchmark": false,
+ "training_seed": 54321,
+ "model": "vits",
+ "num_loader_workers": 4,
+ "num_eval_loader_workers": 4,
+ "use_noise_augment": false,
+ "audio": {
+ "fft_size": 1024,
+ "sample_rate": 16000,
+ "win_length": 1024,
+ "hop_length": 256,
+ "num_mels": 80,
+ "mel_fmin": 0,
+ "mel_fmax": null
+ },
+ "use_phonemes": false,
+ "phonemizer": null,
+ "phoneme_language": null,
+ "compute_input_seq_cache": true,
+ "text_cleaner": "no_cleaners",
+ "enable_eos_bos_chars": false,
+ "test_sentences_file": "",
+ "phoneme_cache_path": null,
+ "characters": {
+ "characters_class": "TTS.tts.models.vits.VitsCharacters",
+ "vocab_dict": null,
+ "pad": "<PAD>",
+ "eos": "<EOS>",
+ "bos": "<BOS>",
+ "blank": "<BLNK>",
+ "characters": "\u2d30\u2d31\u2d33\u2d37\u2d39\u2d3b\u2d3c\u2d3d\u2d40\u2d43\u2d44\u2d45\u2d47\u2d49\u2d4a\u2d4d\u2d4e\u2d4f\u2d53\u2d54\u2d55\u2d56\u2d59\u2d5a\u2d5b\u2d5c\u2d5f\u2d61\u2d62\u2d63\u2d65\u2d6f",
+ "punctuations": " ",
+ "phonemes": null,
+ "is_unique": true,
+ "is_sorted": true
+ },
+ "add_blank": true,
+ "batch_group_size": 0,
+ "loss_masking": null,
+ "min_audio_len": 0,
+ "max_audio_len": Infinity,
+ "min_text_len": 1,
+ "max_text_len": 250,
+ "compute_f0": false,
+ "compute_energy": false,
+ "compute_linear_spec": true,
+ "precompute_num_workers": 0,
+ "start_by_longest": false,
+ "shuffle": false,
+ "drop_last": false,
+ "datasets": [
+ {
+ "formatter": "nemo",
+ "dataset_name": "",
+ "path": "/home/aymen/Tamazight-NLP/Speech/media_ipsapps/shi_tls/hq_segmented/manifests/",
+ "meta_file_train": "manifest.json",
+ "ignored_speakers": null,
+ "language": "",
+ "phonemizer": "",
+ "meta_file_val": "",
+ "meta_file_attn_mask": ""
+ }
+ ],
+ "test_sentences": [
+ [
+ "\u2d30\u2d63\u2d53\u2d4d"
+ ],
+ [
+ "\u2d33\u2d4f \u2d30\u2d37 \u2d30\u2d3d \u2d49\u2d59\u2d59\u2d33\u2d4f \u2d55\u2d31\u2d31\u2d49 \u2d49\u2d5c\u2d5c\u2d53 \u2d3d"
+ ]
+ ],
+ "eval_split_max_size": null,
+ "eval_split_size": 0.01,
+ "use_speaker_weighted_sampler": false,
+ "speaker_weighted_sampler_alpha": 1.0,
+ "use_language_weighted_sampler": false,
+ "language_weighted_sampler_alpha": 1.0,
+ "use_length_weighted_sampler": false,
+ "length_weighted_sampler_alpha": 1.0,
+ "model_args": {
+ "num_chars": 35,
+ "out_channels": 513,
+ "spec_segment_size": 32,
+ "hidden_channels": 192,
+ "hidden_channels_ffn_text_encoder": 768,
+ "num_heads_text_encoder": 2,
+ "num_layers_text_encoder": 6,
+ "kernel_size_text_encoder": 3,
+ "dropout_p_text_encoder": 0.1,
+ "dropout_p_duration_predictor": 0.5,
+ "kernel_size_posterior_encoder": 5,
+ "dilation_rate_posterior_encoder": 1,
+ "num_layers_posterior_encoder": 16,
+ "kernel_size_flow": 5,
+ "dilation_rate_flow": 1,
+ "num_layers_flow": 4,
+ "resblock_type_decoder": "1",
+ "resblock_kernel_sizes_decoder": [
+ 3,
+ 7,
+ 11
+ ],
+ "resblock_dilation_sizes_decoder": [
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ]
+ ],
+ "upsample_rates_decoder": [
+ 8,
+ 8,
+ 2,
+ 2
+ ],
+ "upsample_initial_channel_decoder": 512,
+ "upsample_kernel_sizes_decoder": [
+ 16,
+ 16,
+ 4,
+ 4
+ ],
+ "periods_multi_period_discriminator": [
+ 2,
+ 3,
+ 5,
+ 7,
+ 11
+ ],
+ "use_sdp": true,
+ "noise_scale": 1.0,
+ "inference_noise_scale": 0.667,
+ "length_scale": 1.0,
+ "noise_scale_dp": 1.0,
+ "inference_noise_scale_dp": 1.0,
+ "max_inference_len": null,
+ "init_discriminator": true,
+ "use_spectral_norm_disriminator": false,
+ "use_speaker_embedding": false,
+ "num_speakers": 0,
+ "speakers_file": null,
+ "d_vector_file": null,
+ "speaker_embedding_channels": 256,
+ "use_d_vector_file": false,
+ "d_vector_dim": 0,
+ "detach_dp_input": true,
+ "use_language_embedding": false,
+ "embedded_language_dim": 4,
+ "num_languages": 0,
+ "language_ids_file": null,
+ "use_speaker_encoder_as_loss": false,
+ "speaker_encoder_config_path": "",
+ "speaker_encoder_model_path": "",
+ "condition_dp_on_speaker": true,
+ "freeze_encoder": false,
+ "freeze_DP": false,
+ "freeze_PE": false,
+ "freeze_flow_decoder": false,
+ "freeze_waveform_decoder": false,
+ "encoder_sample_rate": null,
+ "interpolate_z": true,
+ "reinit_DP": false,
+ "reinit_text_encoder": false
+ },
+ "lr_gen": 0.0002,
+ "lr_disc": 0.0002,
+ "lr_scheduler_gen": "ExponentialLR",
+ "lr_scheduler_gen_params": {
+ "gamma": 0.999875,
+ "last_epoch": -1
+ },
+ "lr_scheduler_disc": "ExponentialLR",
+ "lr_scheduler_disc_params": {
+ "gamma": 0.999875,
+ "last_epoch": -1
+ },
+ "kl_loss_alpha": 1.0,
+ "disc_loss_alpha": 1.0,
+ "gen_loss_alpha": 1.0,
+ "feat_loss_alpha": 1.0,
+ "mel_loss_alpha": 45.0,
+ "dur_loss_alpha": 1.0,
+ "speaker_encoder_loss_alpha": 1.0,
+ "return_wav": true,
+ "use_weighted_sampler": false,
+ "weighted_sampler_attrs": {},
+ "weighted_sampler_multipliers": {},
+ "r": 1,
+ "num_speakers": 0,
+ "use_speaker_embedding": false,
+ "speakers_file": null,
+ "speaker_embedding_channels": 256,
+ "language_ids_file": null,
+ "use_language_embedding": false,
+ "use_d_vector_file": false,
+ "d_vector_file": null,
+ "d_vector_dim": 0,
+ "restore_path": "/home/aymen/Tamazight-NLP/Speech/coqui_tts/tashelhit_bible-November-17-2024_01+37PM-0000000/checkpoint_28947.pth",
+ "github_branch": "inside_docker"
+ }
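
For reference, a minimal sketch of running inference with this checkpoint and config via the Coqui TTS Python API, assuming the TTS package is installed and both files from this commit have been downloaded locally (file paths and the output filename below are placeholders, not part of the commit; the text is the first test sentence from the config, "ⴰⵣⵓⵍ"):

from TTS.api import TTS

# Load the VITS checkpoint together with its training config.
tts = TTS(model_path="best_model.pth", config_path="config.json")

# Synthesize one of the config's test sentences to a wav file.
tts.tts_to_file(text="ⴰⵣⵓⵍ", file_path="azul.wav")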