AnasPwnapple committed
Commit 99890d7
1 Parent(s): 3bdfcbb
Files changed (5)
  1. .gitignore +1 -0
  2. README.md +4 -4
  3. app.py +36 -0
  4. conf/gradio-asr.yaml +28 -0
  5. requirements.txt +4 -0
.gitignore ADDED
@@ -0,0 +1 @@
+ .idea
README.md CHANGED
@@ -1,8 +1,8 @@
  ---
- title: Latest Demo
- emoji: 🐨
- colorFrom: pink
- colorTo: green
+ title: Tarteel AI's - Latest Model Demo
+ emoji: 🤗
+ colorFrom: green
+ colorTo: blue
  sdk: gradio
  sdk_version: 3.0.5
  app_file: app.py
app.py ADDED
@@ -0,0 +1,36 @@
+ import gradio as gr
+ from gradio import mix
+ from gradio import inputs, outputs  # noqa: F401
+ import hydra
+ from omegaconf import DictConfig
+
+ from models_evaluation.models import BaseModel
+
+
+ @hydra.main(version_base=None, config_path="conf", config_name="gradio-asr.yaml")
+ def main(cfg: DictConfig):
+     audio_input = gr.inputs.Audio(source=cfg.gradio.source, type="file", label="Audio")
+
+     default_kwargs = {
+         "theme": "dark-huggingface",
+         "allow_flagging": "manual",
+         "title": cfg.gradio.title,
+     }
+
+     all_io = []
+     models = []
+     for model_cfg in cfg.models:
+         model = hydra.utils.instantiate(model_cfg)  # type: BaseModel
+         model_cfg = model_cfg.cfg
+         models.append(model)
+         io = gr.Interface(
+             model.transcribe, inputs=audio_input, outputs=gr.outputs.JSON(label=model_cfg.name)
+         )
+         all_io.append(io)
+
+     parallel = mix.Parallel(*all_io, **default_kwargs)
+     parallel.launch(share=True, debug=True)
+
+
+ if __name__ == "__main__":
+     main()
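
Note: app.py builds one gr.Interface per configured model and wraps them in gradio.mix.Parallel, so a single uploaded clip is transcribed by every model and the JSON outputs appear side by side. Below is a minimal sketch of that pattern, assuming the gradio 3.0.x API; the two lambda functions are placeholders, not the real CitriNet checkpoints.

# Sketch of the gradio.mix.Parallel pattern used in app.py (gradio 3.0.x API).
# The lambda "models" below are purely illustrative stand-ins.
import gradio as gr
from gradio import mix

io_a = gr.Interface(
    lambda audio_file: {"text": "model A output"},
    inputs=gr.inputs.Audio(source="upload", type="file", label="Audio"),
    outputs=gr.outputs.JSON(label="model-a"),
)
io_b = gr.Interface(
    lambda audio_file: {"text": "model B output"},
    inputs=gr.inputs.Audio(source="upload", type="file", label="Audio"),
    outputs=gr.outputs.JSON(label="model-b"),
)

# Parallel merges the interfaces: one shared audio input, one JSON output per model.
demo = mix.Parallel(io_a, io_b, title="Un/Diacritized Comparison")
demo.launch()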
conf/gradio-asr.yaml ADDED
@@ -0,0 +1,28 @@
+ sample_rate: &sample_rate 16000
+ device: &device "cuda"
+
+ gradio:
+   source: "upload"  # 'upload' or 'microphone'
+   title: "CitriNet(Lg) Un/Diacritized Comparison"
+
+ models:
+   - _target_: "models_evaluation.models.citrinet.CitrinetTorch"
+     cfg:
+       model_target: "nemo.collections.asr.models.EncDecCTCModelBPE"
+       name: "CnLg_SpeUni1024_DI_EATL600"
+       sample_rate: *sample_rate
+       device: *device
+       model_path: "wandb://tarteel/nemo-experiments/CnLg_SpeUni1024_DI_EATL600:v1"
+
+   - _target_: "models_evaluation.models.citrinet.CitrinetTorch"
+     cfg:
+       model_target: "nemo.collections.asr.models.EncDecCTCModelBPE"
+       name: "CnLg-SpeUni256-EATL1300"
+       sample_rate: *sample_rate
+       device: *device
+       model_path: "wandb://tarteel/nemo-experiments/CnLg-SpeUni256-EATL1300:v1"
+
+
+ hydra:
+   run:
+     dir: ..
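
Note: each entry under models: is resolved at runtime by hydra.utils.instantiate in app.py; the _target_ string names the class to import, and the remaining keys (here the nested cfg node) become constructor arguments. A minimal, self-contained sketch of that mechanism follows, with a purely illustrative DummyModel standing in for the private CitrinetTorch wrapper.

# Sketch of how hydra.utils.instantiate resolves a "models" entry from conf/gradio-asr.yaml.
# DummyModel is a hypothetical stand-in for models_evaluation.models.citrinet.CitrinetTorch.
import hydra
from omegaconf import OmegaConf

class DummyModel:
    def __init__(self, cfg):
        self.cfg = cfg  # receives the nested "cfg" node (name, sample_rate, device, ...)

    def transcribe(self, audio_file):
        return {"text": "<placeholder transcription>"}

entry = OmegaConf.create({
    "_target_": "__main__.DummyModel",
    "cfg": {"name": "CnLg_SpeUni1024_DI_EATL600", "sample_rate": 16000, "device": "cuda"},
})

model = hydra.utils.instantiate(entry)  # imports __main__.DummyModel, calls it with cfg=...
print(model.cfg.name)                   # -> CnLg_SpeUni1024_DI_EATL600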
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ nemo_toolkit[asr]==1.7.2
+ hydra-core
+ wandb
+ git+https://[email protected]/Tarteel-io/models_evaluation.git@main#egg=models_evaluation[nemo]