M4xjunior committed on
Commit
64bfc26
·
verified ·
1 Parent(s): 6880a48

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -1
app.py CHANGED
@@ -10,9 +10,10 @@ CHAR_LIMIT = None if IS_DUPLICATE else 5000
10
 
11
  CUDA_AVAILABLE = torch.cuda.is_available()
12
  models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
13
- pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in 'ab'}
14
  pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'
15
  pipelines['b'].g2p.lexicon.golds['kokoro'] = 'kˈQkəɹQ'
 
16
 
17
  @spaces.GPU(duration=30)
18
  def forward_gpu(ps, ref_s, speed):
@@ -118,6 +119,9 @@ CHOICES = {
118
  '🇬🇧 🚹 Fable': 'bm_fable',
119
  '🇬🇧 🚹 Lewis': 'bm_lewis',
120
  '🇬🇧 🚹 Daniel': 'bm_daniel',
 
 
 
121
  }
122
  for v in CHOICES.values():
123
  pipelines[v[0]].load_voice(v)
 
10
 
11
  CUDA_AVAILABLE = torch.cuda.is_available()
12
  models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
13
+ pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in 'abp'}
14
  pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'
15
  pipelines['b'].g2p.lexicon.golds['kokoro'] = 'kˈQkəɹQ'
16
+ pipelines['p'].g2p.lexicon.golds['kokoro'] = 'kˈQkəɹQ'
17
 
18
  @spaces.GPU(duration=30)
19
  def forward_gpu(ps, ref_s, speed):
 
119
  '🇬🇧 🚹 Fable': 'bm_fable',
120
  '🇬🇧 🚹 Lewis': 'bm_lewis',
121
  '🇬🇧 🚹 Daniel': 'bm_daniel',
122
+ '🇧🇷 🚹 Alex': 'pm_alex',
123
+ '🇧🇷 🚹 Santana': 'pm_santa',
124
+ '🇧🇷 🚺 Dora': 'pf_dora',
125
  }
126
  for v in CHOICES.values():
127
  pipelines[v[0]].load_voice(v)