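"""Export a FunASR model to ONNX.

If --model-name is not an existing local path, the model is first downloaded
from ModelScope into --export-dir before export.

Usage sketch (the script filename and model id are placeholders, not defined
in this file):

    python export_onnx.py \
        --model-name <modelscope_model_id_or_local_path> \
        --export-dir ./export \
        --quantize false
"""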
import os
import argparse
from pathlib import Path

from funasr_detach.utils.types import str2bool


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--model-name", type=str, required=True)
    parser.add_argument("--export-dir", type=str, required=True)
    parser.add_argument(
        "--export", type=str2bool, default=True, help="whether to export model"
    )
    parser.add_argument("--type", type=str, default="onnx", help='["onnx", "torch"]')
    parser.add_argument("--device", type=str, default="cpu", help='["cpu", "cuda"]')
    parser.add_argument(
        "--quantize", type=str2bool, default=False, help="export quantized model"
    )
    parser.add_argument(
        "--fallback-num", type=int, default=0, help="amp fallback number"
    )
    parser.add_argument("--audio_in", type=str, default=None, help='["wav", "wav.scp"]')
    parser.add_argument(
        "--model_revision", type=str, default=None, help="model_revision"
    )
    parser.add_argument("--calib_num", type=int, default=200, help="calib max num")
    args = parser.parse_args()

    # Treat --model-name as a local path if it exists; otherwise download the model from ModelScope.
    model_dir = args.model_name
    if not Path(args.model_name).exists():
        from modelscope.hub.snapshot_download import snapshot_download

        try:
            model_dir = snapshot_download(
                args.model_name, cache_dir=args.export_dir, revision=args.model_revision
            )
        except Exception as e:
            raise ValueError(
                "--model-name must be a ModelScope model name or a local path "
                "downloaded from ModelScope, but got {}".format(model_dir)
            ) from e
    if args.export:
        # Export only if the (possibly quantized) ONNX model is not already present.
        model_file = os.path.join(model_dir, "model.onnx")
        if args.quantize:
            model_file = os.path.join(model_dir, "model_quant.onnx")
        if not os.path.exists(model_file):
            print("{} does not exist, exporting the model to ONNX".format(model_file))
            from funasr_detach.bin.export_model import ModelExport

            export_model = ModelExport(
                cache_dir=args.export_dir,
                onnx=True,
                device="cpu",
                quant=args.quantize,
            )
            export_model.export(model_dir)


if __name__ == "__main__":
    main()