import os
import traceback
from collections import OrderedDict
from time import time

import torch

from i18n.i18n import I18nAuto
from infer.modules.vc import model_hash_ckpt, hash_id

i18n = I18nAuto()


# add author signature
def save_small_model(ckpt, sr, if_f0, name, epoch, version, hps):
    """Save a half-precision, inference-only copy of an in-memory training checkpoint."""
    try:
        opt = OrderedDict()
        opt["weight"] = {}
        for key in ckpt.keys():
            # The posterior encoder (enc_q) is only needed for training, so drop it.
            if "enc_q" in key:
                continue
            opt["weight"][key] = ckpt[key].half()
        opt["config"] = [
            hps.data.filter_length // 2 + 1,
            32,
            hps.model.inter_channels,
            hps.model.hidden_channels,
            hps.model.filter_channels,
            hps.model.n_heads,
            hps.model.n_layers,
            hps.model.kernel_size,
            hps.model.p_dropout,
            hps.model.resblock,
            hps.model.resblock_kernel_sizes,
            hps.model.resblock_dilation_sizes,
            hps.model.upsample_rates,
            hps.model.upsample_initial_channel,
            hps.model.upsample_kernel_sizes,
            hps.model.spk_embed_dim,
            hps.model.gin_channels,
            hps.data.sampling_rate,
        ]
        opt["info"] = "%sepoch" % epoch
        opt["name"] = name
        opt["timestamp"] = int(time())
        if hps.author:
            opt["author"] = hps.author
        opt["sr"] = sr
        opt["f0"] = if_f0
        opt["version"] = version
        h = model_hash_ckpt(opt)
        opt["hash"] = h
        opt["id"] = hash_id(h)
        torch.save(opt, "assets/weights/%s.pth" % name)
        return "Success."
    except:
        return traceback.format_exc()


def extract_small_model(path, name, author, sr, if_f0, info, version):
    """Extract a small inference-only model from a full training checkpoint on disk."""
    try:
        ckpt = torch.load(path, map_location="cpu")
        if "model" in ckpt:
            ckpt = ckpt["model"]
        opt = OrderedDict()
        opt["weight"] = {}
        for key in ckpt.keys():
            if "enc_q" in key:
                continue
            opt["weight"][key] = ckpt[key].half()
        # Hard-coded synthesizer configs per sample rate and model version.
        if sr == "40k":
            opt["config"] = [
                1025, 32, 192, 192, 768, 2, 6, 3, 0, "1",
                [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
                [10, 10, 2, 2], 512, [16, 16, 4, 4],
                109, 256, 40000,
            ]
        elif sr == "48k":
            if version == "v1":
                opt["config"] = [
                    1025, 32, 192, 192, 768, 2, 6, 3, 0, "1",
                    [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
                    [10, 6, 2, 2, 2], 512, [16, 16, 4, 4, 4],
                    109, 256, 48000,
                ]
            else:
                opt["config"] = [
                    1025, 32, 192, 192, 768, 2, 6, 3, 0, "1",
                    [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
                    [12, 10, 2, 2], 512, [24, 20, 4, 4],
                    109, 256, 48000,
                ]
        elif sr == "32k":
            if version == "v1":
                opt["config"] = [
                    513, 32, 192, 192, 768, 2, 6, 3, 0, "1",
                    [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
                    [10, 4, 2, 2, 2], 512, [16, 16, 4, 4, 4],
                    109, 256, 32000,
                ]
            else:
                opt["config"] = [
                    513, 32, 192, 192, 768, 2, 6, 3, 0, "1",
                    [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]],
                    [10, 8, 2, 2], 512, [20, 16, 4, 4],
                    109, 256, 32000,
                ]
        if info == "":
            info = "Extracted model."
        opt["info"] = info
        opt["name"] = name
        opt["timestamp"] = int(time())
        if author:
            opt["author"] = author
        opt["version"] = version
        opt["sr"] = sr
        opt["f0"] = int(if_f0)
        h = model_hash_ckpt(opt)
        opt["hash"] = h
        opt["id"] = hash_id(h)
        torch.save(opt, "assets/weights/%s.pth" % name)
        return "Success."
    except:
        return traceback.format_exc()


def change_info(path, info, name):
    """Rewrite the free-form info field of a saved model."""
    try:
        ckpt = torch.load(path, map_location="cpu")
        ckpt["info"] = info
        if name == "":
            name = os.path.basename(path)
        torch.save(ckpt, "assets/weights/%s" % name)
        return "Success."
    except:
        return traceback.format_exc()


def merge(path1, path2, alpha1, sr, f0, info, name, version):
    """Linearly blend two compatible models: alpha1 * model1 + (1 - alpha1) * model2."""
    try:

        def extract(ckpt):
            # Pull the weights out of a full training checkpoint, skipping enc_q.
            a = ckpt["model"]
            opt = OrderedDict()
            opt["weight"] = {}
            for key in a.keys():
                if "enc_q" in key:
                    continue
                opt["weight"][key] = a[key]
            # Return the flat weight dict so both branches below yield the same layout.
            return opt["weight"]

        def authors(c1, c2):
            # Combine the author tags of the two source models.
            a1, a2 = c1.get("author", ""), c2.get("author", "")
            if a1 == a2:
                return a1
            if not a1:
                a1 = "Unknown"
            if not a2:
                a2 = "Unknown"
            return f"{a1} & {a2}"

        ckpt1 = torch.load(path1, map_location="cpu")
        ckpt2 = torch.load(path2, map_location="cpu")
        cfg = ckpt1["config"]
        # Read the author tags now, before ckpt1/ckpt2 are replaced by bare weight dicts.
        author = authors(ckpt1, ckpt2)
        if "model" in ckpt1:
            ckpt1 = extract(ckpt1)
        else:
            ckpt1 = ckpt1["weight"]
        if "model" in ckpt2:
            ckpt2 = extract(ckpt2)
        else:
            ckpt2 = ckpt2["weight"]
        if sorted(list(ckpt1.keys())) != sorted(list(ckpt2.keys())):
            return "Failed to merge the models: the model architectures are not the same."
        opt = OrderedDict()
        opt["weight"] = {}
        for key in ckpt1.keys():
            if key == "emb_g.weight" and ckpt1[key].shape != ckpt2[key].shape:
                # Speaker-embedding tables may differ in speaker count; blend only the overlap.
                min_shape0 = min(ckpt1[key].shape[0], ckpt2[key].shape[0])
                opt["weight"][key] = (
                    alpha1 * (ckpt1[key][:min_shape0].float())
                    + (1 - alpha1) * (ckpt2[key][:min_shape0].float())
                ).half()
            else:
                opt["weight"][key] = (
                    alpha1 * (ckpt1[key].float()) + (1 - alpha1) * (ckpt2[key].float())
                ).half()
        # The merged model reuses model 1's config.
        opt["config"] = cfg
        opt["name"] = name
        opt["timestamp"] = int(time())
        if author:
            opt["author"] = author
        opt["sr"] = sr
        opt["f0"] = 1 if f0 == i18n("Yes") else 0
        opt["version"] = version
        opt["info"] = info
        h = model_hash_ckpt(opt)
        opt["hash"] = h
        opt["id"] = hash_id(h)
        torch.save(opt, "assets/weights/%s.pth" % name)
        return "Success."
    except:
        return traceback.format_exc()
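

# Illustrative usage sketch of extract_small_model and merge. In the WebUI these
# functions are normally driven from the checkpoint-processing tab; the checkpoint
# paths, model names, and author string below are hypothetical placeholders only.
if __name__ == "__main__":
    # Extract a small inference-only model from a full training checkpoint.
    print(
        extract_small_model(
            path="logs/my_voice/G_2333.pth",  # hypothetical training checkpoint
            name="my_voice",
            author="me",
            sr="40k",
            if_f0=1,
            info="",
            version="v2",
        )
    )

    # Blend two compatible small models; alpha1 weights the first model.
    print(
        merge(
            path1="assets/weights/voice_a.pth",  # hypothetical
            path2="assets/weights/voice_b.pth",  # hypothetical
            alpha1=0.5,
            sr="40k",
            f0=i18n("Yes"),  # the UI passes the localized "Yes"/"No" label
            info="50/50 blend of voice_a and voice_b",
            name="voice_ab",
            version="v2",
        )
    )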