Update app.py
app.py CHANGED
@@ -62,7 +62,7 @@ fix_duration = None
 
 
 def load_model(page_name, repo_name, exp_name, model_cls, model_cfg, ckpt_step):
-    ckpt_path = str(cached_path(f"hf://{page_name}/{repo_name}/{exp_name}/model_{ckpt_step}.
+    ckpt_path = str(cached_path(f"hf://{page_name}/{repo_name}/{exp_name}/model_{ckpt_step}.safetensors"))
     # ckpt_path = f"ckpts/{exp_name}/model_{ckpt_step}.pt"  # .pt | .safetensors
     vocab_char_map, vocab_size = get_tokenizer("Emilia_ZH_EN", "pinyin")
     model = CFM(
@@ -92,7 +92,7 @@ F5TTS_model_cfg = dict(
 E2TTS_model_cfg = dict(dim=1024, depth=24, heads=16, ff_mult=4)
 
 F5TTS_ema_model = load_model(
-    "Gregniuki", "F5-tts_English_German_Polish", "Polish", DiT, F5TTS_model_cfg,
+    "Gregniuki", "F5-tts_English_German_Polish", "Polish", DiT, F5TTS_model_cfg, 270000
 )
 #E2TTS_ema_model = load_model(
 #    "SWivid", "F5-TTS", "E2TTS_Base", UNetT, E2TTS_model_cfg, 1200000
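For context, the change points load_model() at a .safetensors checkpoint resolved from the Hugging Face Hub through the cached_path library (which understands hf:// URLs) and pins the checkpoint step to 270000. A minimal sketch of how the updated line resolves the file, using the values from the call site in the second hunk (illustrative only, not part of the commit):

    from cached_path import cached_path

    page_name, repo_name, exp_name, ckpt_step = (
        "Gregniuki", "F5-tts_English_German_Polish", "Polish", 270000
    )

    # cached_path downloads the file on first use and returns its local cache path,
    # so load_model() can pass an ordinary filesystem path to the checkpoint loader.
    ckpt_path = str(cached_path(
        f"hf://{page_name}/{repo_name}/{exp_name}/model_{ckpt_step}.safetensors"
    ))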